package com.github.jcooky.mina.thrift;

import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.service.IoHandlerAdapter;
import org.apache.mina.core.session.IdleStatus;
import org.apache.mina.core.session.IoSession;
import org.apache.thrift.TProcessor;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TMinaThriftHandler extends IoHandlerAdapter {
    private static final Logger logger = LoggerFactory.getLogger(TMinaThriftHandler.class);

    private TProcessorFactory processorFactory;
    private TTransportFactory inputTransportFactory, outputTransportFactory;
    private TProtocolFactory inputProtocolFactory, outputProtocolFactory;

    public TMinaThriftHandler(TProcessorFactory processorFactory,
                              TTransportFactory inputTransportFactory,
                              TTransportFactory outputTransportFactory,
                              TProtocolFactory inputProtocolFactory,
                              TProtocolFactory outputProtocolFactory) {
        super();
        this.processorFactory = processorFactory;
        this.inputTransportFactory = inputTransportFactory;
        this.outputTransportFactory = outputTransportFactory;
        this.inputProtocolFactory = inputProtocolFactory;
        this.outputProtocolFactory = outputProtocolFactory;
    }

    public void sessionCreated(IoSession session) throws Exception {
    }

    public void sessionOpened(IoSession session) throws Exception {
        TIoSessionTransport trans = new TIoSessionTransport(session);
        session.setAttribute(Constants.TRANSPORT, trans);
        session.setAttribute(Constants.BUFFER, null);
    }

    public void sessionClosed(IoSession session) throws Exception {
    }

    public void sessionIdle(IoSession session, IdleStatus status) throws Exception {
    }

    public void exceptionCaught(IoSession session, Throwable cause) throws Exception {
    }

    public void messageReceived(IoSession session, Object message) throws Exception {
        TIoSessionTransport transport = (TIoSessionTransport) session.getAttribute(Constants.TRANSPORT);
        session.setAttribute(Constants.BUFFER, (IoBuffer) message);

        TProcessor processor = this.processorFactory.getProcessor(transport);
        if (processor != null) {
            TTransport inputTransport = inputTransportFactory.getTransport(transport);
            TTransport outputTransport = outputTransportFactory.getTransport(transport);
            processor.process(inputProtocolFactory.getProtocol(inputTransport),
                    outputProtocolFactory.getProtocol(outputTransport));
        } else {
            throw new TTransportException("processor is null");
        }
    }

    public void messageSent(IoSession session, Object message) throws Exception {
    }
}
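A minimal wiring sketch for the handler above. It is not part of the original file: MyService and MyServiceHandler stand in for a Thrift-generated service and its implementation, and the choice of TBinaryProtocol, pass-through TTransportFactory instances, and a MINA NioSocketAcceptor on port 9090 are illustrative assumptions, not requirements of TMinaThriftHandler.

import java.net.InetSocketAddress;

import org.apache.mina.transport.socket.nio.NioSocketAcceptor;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TTransportFactory;

public class ThriftMinaServerSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical Thrift-generated service; only the handler wiring comes from the class above.
        MyService.Processor<MyServiceHandler> processor =
                new MyService.Processor<>(new MyServiceHandler());

        TMinaThriftHandler handler = new TMinaThriftHandler(
                new TProcessorFactory(processor),
                new TTransportFactory(),          // pass-through input transport
                new TTransportFactory(),          // pass-through output transport
                new TBinaryProtocol.Factory(),
                new TBinaryProtocol.Factory());

        NioSocketAcceptor acceptor = new NioSocketAcceptor();
        acceptor.setHandler(handler);
        acceptor.bind(new InetSocketAddress(9090)); // port chosen arbitrarily for the sketch
    }
}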
package org.apache.qpid.contrib.json;

import java.io.IOException;

import org.apache.qpid.contrib.json.processer.EventProcesser;

import com.alibaba.fastjson.JSON;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Consumer;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.Envelope;

public class ReceiveMessageUtils {

    private Channel channel;
    private Connection connection;

    /**
     * @param queueName      the queue to consume from
     * @param eventProcesser callback invoked for every decoded message
     * @param clazz          target type for JSON deserialization, or null to parse generically
     * @throws Exception
     */
    public void receiveMessage(String queueName, EventProcesser eventProcesser, Class<?> clazz) throws Exception {
        channel = connection.createChannel();
        channel.queueDeclare(queueName, true, false, false, null);
        // System.out.println(" [*] Waiting for messages. To exit press CTRL+C");
        channel.basicQos(1); // have RabbitMQ deliver one unacknowledged message at a time

        Consumer consumer = new DefaultConsumer(channel) {
            @Override
            public void handleDelivery(String consumerTag, Envelope envelope,
                    AMQP.BasicProperties properties, byte[] body) throws IOException {
                String message = new String(body);
                if (clazz != null) {
                    eventProcesser.process(JSON.parseObject(message, clazz));
                } else {
                    eventProcesser.process(JSON.parse(message));
                }
            }
        };
        channel.basicConsume(queueName, true, consumer);
    }

    public ReceiveMessageUtils(Connection connection) {
        super();
        this.connection = connection;
    }

    public void close() throws Exception {
        connection.close();
    }
}
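A hedged usage sketch for ReceiveMessageUtils, assuming a RabbitMQ broker on localhost, a hypothetical MyEvent payload class, and that EventProcesser declares a single process(Object) method so a lambda can implement it; none of these assumptions come from the file above.

import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class ReceiveExample {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost"); // assumed broker location

        Connection connection = factory.newConnection();
        ReceiveMessageUtils receiver = new ReceiveMessageUtils(connection);

        // MyEvent is a placeholder type; pass null instead to receive generic fastjson objects.
        // The lambda assumes EventProcesser is a single-method callback interface.
        receiver.receiveMessage("events", event -> System.out.println("got " + event), MyEvent.class);
    }
}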
package net.morimekta.console.terminal; import com.google.common.collect.ImmutableList; import net.morimekta.console.chr.Char; import net.morimekta.console.chr.CharUtil; import net.morimekta.console.test_utils.ConsoleWatcher; import net.morimekta.console.test_utils.FakeClock; import net.morimekta.console.test_utils.FakeScheduledExecutor; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; public class ProgressManagerTest { @Rule public ConsoleWatcher console = new ConsoleWatcher(); private FakeClock clock; private FakeScheduledExecutor executor; private Terminal terminal; @Before public void setUp() { clock = new FakeClock(); executor = new FakeScheduledExecutor(clock); terminal = new Terminal(console.tty()) { @Override protected void sleep(long millis) throws InterruptedException { clock.tick(millis); } }; } @After public void tearDown() throws IOException { terminal.close(); } @Test public void testSingleThread() throws IOException, InterruptedException, ExecutionException { ArrayList<ProgressManager.InternalTask<String>> started = new ArrayList<>(); try (ProgressManager progress = new ProgressManager(terminal, Progress.Spinner.ASCII, 1, executor, clock)) { Future<String> first = progress.addTask("First", 10000, (a, b) -> started.add((ProgressManager.InternalTask<String>) a)); Future<String> second = progress.addTask("Second", 10000, (a, b) -> started.add((ProgressManager.InternalTask<String>) a)); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of())); clock.tick(250L); // does the render assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ " -- And 1 more..."))); assertThat(started, hasSize(1)); started.get(0).accept(1000); clock.tick(250L); // does the render assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ " -- And 1 more..."))); started.get(0).completeExceptionally(new Exception("Failed")); clock.tick(250L); // does the render assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ assertThat(started, hasSize(2)); started.get(1).accept(1000); clock.tick(250L); // does the render assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ started.get(1).complete("OK"); clock.tick(250L); // does the render try { first.get(); fail("No exception"); } catch (ExecutionException e) { assertThat(e.getCause().getMessage(), is("Failed")); } assertThat(second.get(), is("OK")); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ } } @Test public void testMultiThread() throws IOException, InterruptedException, ExecutionException, TimeoutException { try (ProgressManager progress = new ProgressManager(terminal, Progress.Spinner.ASCII)) { Future<String> first = progress.addTask("First", 10000, task -> { try { Thread.sleep(50); task.accept(1000); } catch (InterruptedException ignore) { } throw new RuntimeException("Failed"); }); Future<String> 
second = progress.addTask("Second", 10000, task -> { try { Thread.sleep(50); task.accept(1000); Thread.sleep(150); task.accept(10000); } catch (InterruptedException ignore) { } return "OK"; }); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of())); Thread.sleep(35L); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ Thread.sleep(100L); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ progress.waitAbortable(); try { first.get(10L, MILLISECONDS); fail("No exception"); } catch (ExecutionException e) { assertThat(e.getCause().getMessage(), is("Failed")); } assertThat(second.get(10L, MILLISECONDS), is("OK")); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ } } @Test public void testAbort() throws IOException, InterruptedException, ExecutionException { console.setInput(Char.ABR); try (ProgressManager progress = new ProgressManager(terminal, Progress.Spinner.ASCII)) { Future<String> first = progress.addTask("First", 10000, task -> { Thread.sleep(20); task.accept(1000); throw new RuntimeException("Failed"); }); Future<String> second = progress.addTask("Second", 10000, task -> { Thread.sleep(50); task.accept(1000); Thread.sleep(520); task.accept(10000); return "OK"; }); assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of())); Thread.sleep(100L); try { progress.waitAbortable(); fail("No exception"); } catch (IOException e) { assertThat(e.getMessage(), is("Aborted with '<ABR>'")); } assertThat(first.isDone(), is(true)); try { first.get(); fail("No exception"); } catch (ExecutionException e) { assertThat(e.getCause().getMessage(), is("Failed")); } assertThat(second.isCancelled(), is(true)); try { String s = second.get(); fail("No exception: " + s); } catch (CancellationException e) { // nothing to verify on exception. } assertThat(stripNonPrintableLines(progress.lines()), is(ImmutableList.of("First: [ "Second: [ } } private static List<String> stripNonPrintableLines(List<String> lines) { return ImmutableList.copyOf(lines.stream().map(CharUtil::stripNonPrintable).collect(Collectors.toList())); } }
package org.clapper.curn.parser.informa;

import org.clapper.curn.parser.RSSParser;
import org.clapper.curn.parser.RSSChannel;
import org.clapper.curn.parser.RSSParserException;

import de.nava.informa.core.ChannelIF;
import de.nava.informa.core.ParseException;
import de.nava.informa.parsers.FeedParser;
import de.nava.informa.impl.basic.ChannelBuilder;

import org.apache.commons.logging.LogFactory;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.Reader;

public class RSSParserAdapter implements RSSParser {
    /**
     * Default constructor.
     */
    public RSSParserAdapter() {
        // Disable Informa logging for now.
        LogFactory logFactory = LogFactory.getFactory();
        logFactory.setAttribute("org.apache.commons.logging.Log",
                                "org.apache.commons.logging.impl.NoOpLog");
    }

    /**
     * Parse an RSS feed.
     *
     * @param stream   the <tt>InputStream</tt> for the feed
     * @param encoding the encoding of the data in the feed, if known, or null
     *
     * @return an <tt>RSSChannel</tt> object representing the RSS data from
     *         the site.
     *
     * @throws IOException        unable to read from URL
     * @throws RSSParserException unable to parse RSS XML
     */
    public RSSChannel parseRSSFeed(InputStream stream, String encoding)
        throws IOException, RSSParserException {
        try {
            ChannelBuilder builder = new ChannelBuilder();
            ChannelIF channel;
            Reader reader;

            if (encoding == null)
                reader = new InputStreamReader(stream);
            else
                reader = new InputStreamReader(stream, encoding);

            channel = FeedParser.parse(builder, reader);
            return new RSSChannelAdapter(channel);
        } catch (ParseException ex) {
            throw new RSSParserException(ex);
        }
    }
}
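A brief usage sketch for the adapter above, assuming a reachable feed URL (the URL below is an illustrative placeholder) and that inspecting the returned RSSChannel is left to the caller.

import java.io.InputStream;
import java.net.URL;

import org.clapper.curn.parser.RSSChannel;
import org.clapper.curn.parser.RSSParser;

public class ParseFeedExample {
    public static void main(String[] args) throws Exception {
        RSSParser parser = new RSSParserAdapter();
        // Placeholder feed URL; any RSS source readable as an InputStream works the same way.
        try (InputStream in = new URL("https://example.org/feed.xml").openStream()) {
            RSSChannel channel = parser.parseRSSFeed(in, "UTF-8");
            System.out.println("Parsed channel: " + channel);
        }
    }
}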
package org.ligi.snackengage;

public class Dependencies {

    public static class Android {
        public static final String APPLICATION_ID = "org.ligi.snackengage";
        public static final String BUILD_TOOLS_VERSION = "29.0.3";
        public static final int MIN_SDK_VERSION = 14;
        public static final int COMPILE_SDK_VERSION = 28;
        public static final int TARGET_SDK_VERSION = 28;
        public static final int VERSION_CODE = 24;
        public static final String VERSION_NAME = "0.24";
    }

    public static class GradlePlugins {
        public static final String ANDROID = "com.android.tools.build:gradle:4.0.1";
        public static final String MAVEN = "com.github.dcendents:android-maven-gradle-plugin:2.1";
        public static final String VERSIONS = "com.github.ben-manes:gradle-versions-plugin:0.29.0";
    }

    public static class Libs {
        public static final String ANNOTATION = "androidx.annotation:annotation:1.1.0";
        public static final String APPCOMPAT = "androidx.appcompat:appcompat:1.1.0";
        public static final String ASSERTJ_ANDROID = "com.squareup.assertj:assertj-android:1.2.0";
        public static final String JUNIT = "junit:junit:4.13";
        public static final String MATERIAL = "com.google.android.material:material:1.1.0";
        public static final String MOCKITO = "org.mockito:mockito-core:3.3.0";
    }
}
package org.chromium.content.browser; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.graphics.Color; import android.os.Handler; import android.os.Message; import android.os.RemoteException; import android.util.Log; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.MediaController; import android.widget.MediaController.MediaPlayerControl; import android.widget.ProgressBar; import android.widget.TextView; import java.lang.ref.WeakReference; import org.chromium.base.CalledByNative; import org.chromium.base.JNINamespace; import org.chromium.content.common.IChildProcessService; import org.chromium.content.R; @JNINamespace("content") public class ContentVideoView extends FrameLayout implements MediaPlayerControl, SurfaceHolder.Callback, View.OnTouchListener, View.OnKeyListener { private static final String TAG = "ContentVideoView"; /* Do not change these values without updating their counterparts * in include/media/mediaplayer.h! */ private static final int MEDIA_NOP = 0; // interface test message private static final int MEDIA_PREPARED = 1; private static final int MEDIA_PLAYBACK_COMPLETE = 2; private static final int MEDIA_BUFFERING_UPDATE = 3; private static final int MEDIA_SEEK_COMPLETE = 4; private static final int MEDIA_SET_VIDEO_SIZE = 5; private static final int MEDIA_ERROR = 100; private static final int MEDIA_INFO = 200; /** The video is streamed and its container is not valid for progressive * playback i.e the video's index (e.g moov atom) is not at the start of the * file. */ public static final int MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 2; // all possible internal states private static final int STATE_ERROR = -1; private static final int STATE_IDLE = 0; private static final int STATE_PLAYING = 1; private static final int STATE_PAUSED = 2; private static final int STATE_PLAYBACK_COMPLETED = 3; private SurfaceHolder mSurfaceHolder = null; private int mVideoWidth = 0; private int mVideoHeight = 0; private int mCurrentBufferPercentage; private int mDuration; private MediaController mMediaController = null; private boolean mCanPause; private boolean mCanSeekBack; private boolean mCanSeekForward; // Native pointer to C++ ContentVideoView object. private int mNativeContentVideoView = 0; // webkit should have prepared the media private int mCurrentState = STATE_IDLE; // Strings for displaying media player errors static String mPlaybackErrorText; static String mUnknownErrorText; static String mErrorButton; static String mErrorTitle; static String mVideoLoadingText; // This view will contain the video. private VideoSurfaceView mVideoSurfaceView; // Progress view when the video is loading. private View mProgressView; private Surface mSurface = null; // There are can be at most 1 fullscreen video // TODO(qinmin): will change this once we move the creation of this class // to the host application private static ContentVideoView sContentVideoView = null; // The delegate will follow sContentVideoView. We would need to // move this to an instance variable if we allow multiple ContentVideoViews. 
private static ContentVideoViewContextDelegate sDelegate = null; private class VideoSurfaceView extends SurfaceView { public VideoSurfaceView(Context context) { super(context); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { if (mVideoWidth == 0 && mVideoHeight == 0) { setMeasuredDimension(1, 1); return; } int width = getDefaultSize(mVideoWidth, widthMeasureSpec); int height = getDefaultSize(mVideoHeight, heightMeasureSpec); if (mVideoWidth > 0 && mVideoHeight > 0) { if ( mVideoWidth * height > width * mVideoHeight ) { height = width * mVideoHeight / mVideoWidth; } else if ( mVideoWidth * height < width * mVideoHeight ) { width = height * mVideoWidth / mVideoHeight; } } setMeasuredDimension(width, height); } } private static class ProgressView extends LinearLayout { private ProgressBar mProgressBar; private TextView mTextView; public ProgressView(Context context) { super(context); setOrientation(LinearLayout.VERTICAL); setLayoutParams(new LinearLayout.LayoutParams( LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT)); mProgressBar = new ProgressBar(context, null, android.R.attr.progressBarStyleLarge); mTextView = new TextView(context); mTextView.setText(mVideoLoadingText); addView(mProgressBar); addView(mTextView); } } private static class FullScreenMediaController extends MediaController { View mVideoView; public FullScreenMediaController(Context context, View video) { super(context); mVideoView = video; } @Override public void show() { super.show(); if (mVideoView != null) { mVideoView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_VISIBLE); } } @Override public void hide() { if (mVideoView != null) { mVideoView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LOW_PROFILE); } super.hide(); } } private Runnable mExitFullscreenRunnable = new Runnable() { @Override public void run() { destroyContentVideoView(); } }; public ContentVideoView(Context context) { this(context, 0); } private ContentVideoView(Context context, int nativeContentVideoView) { super(context); initResources(context); if (nativeContentVideoView == 0) return; mNativeContentVideoView = nativeContentVideoView; mCurrentBufferPercentage = 0; mVideoSurfaceView = new VideoSurfaceView(context); } private static void initResources(Context context) { if (mPlaybackErrorText != null) return; mPlaybackErrorText = context.getString( org.chromium.content.R.string.media_player_error_text_invalid_progressive_playback); mUnknownErrorText = context.getString( org.chromium.content.R.string.media_player_error_text_unknown); mErrorButton = context.getString( org.chromium.content.R.string.media_player_error_button); mErrorTitle = context.getString( org.chromium.content.R.string.media_player_error_title); mVideoLoadingText = context.getString( org.chromium.content.R.string.media_player_loading_video); } void showContentVideoView() { FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams( ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT, Gravity.CENTER); this.addView(mVideoSurfaceView, layoutParams); View progressView = sDelegate.getVideoLoadingProgressView(); if (progressView != null) { mProgressView = progressView; } else { mProgressView = new ProgressView(getContext()); } this.addView(mProgressView, layoutParams); mVideoSurfaceView.setZOrderOnTop(true); mVideoSurfaceView.setOnKeyListener(this); mVideoSurfaceView.setOnTouchListener(this); mVideoSurfaceView.getHolder().addCallback(this); 
mVideoSurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); mVideoSurfaceView.setFocusable(true); mVideoSurfaceView.setFocusableInTouchMode(true); mVideoSurfaceView.requestFocus(); } @CalledByNative public void onMediaPlayerError(int errorType) { Log.d(TAG, "OnMediaPlayerError: " + errorType); if (mCurrentState == STATE_ERROR || mCurrentState == STATE_PLAYBACK_COMPLETED) { return; } mCurrentState = STATE_ERROR; if (mMediaController != null) { mMediaController.hide(); } /* Pop up an error dialog so the user knows that * something bad has happened. Only try and pop up the dialog * if we're attached to a window. When we're going away and no * longer have a window, don't bother showing the user an error. * * TODO(qinmin): We need to review whether this Dialog is OK with * the rest of the browser UI elements. */ if (getWindowToken() != null) { String message; if (errorType == MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK) { message = mPlaybackErrorText; } else { message = mUnknownErrorText; } new AlertDialog.Builder(getContext()) .setTitle(mErrorTitle) .setMessage(message) .setPositiveButton(mErrorButton, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { /* Inform that the video is over. */ onCompletion(); } }) .setCancelable(false) .show(); } } @CalledByNative public void onVideoSizeChanged(int width, int height) { mVideoWidth = width; mVideoHeight = height; if (mVideoWidth != 0 && mVideoHeight != 0) { mVideoSurfaceView.getHolder().setFixedSize(mVideoWidth, mVideoHeight); } } @CalledByNative public void onBufferingUpdate(int percent) { mCurrentBufferPercentage = percent; } @CalledByNative public void onPlaybackComplete() { onCompletion(); } @CalledByNative public void updateMediaMetadata( int videoWidth, int videoHeight, int duration, boolean canPause, boolean canSeekBack, boolean canSeekForward) { mProgressView.setVisibility(View.GONE); mDuration = duration; mCanPause = canPause; mCanSeekBack = canSeekBack; mCanSeekForward = canSeekForward; mCurrentState = isPlaying() ? STATE_PLAYING : STATE_PAUSED; if (mMediaController != null) { mMediaController.setEnabled(true); // If paused , should show the controller for ever. 
if (isPlaying()) mMediaController.show(); else mMediaController.show(0); } onVideoSizeChanged(videoWidth, videoHeight); } public void destroyNativeView() { if (mNativeContentVideoView != 0) { mNativeContentVideoView = 0; destroyContentVideoView(); } } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { mVideoSurfaceView.setFocusable(true); mVideoSurfaceView.setFocusableInTouchMode(true); if (isInPlaybackState() && mMediaController != null) { mMediaController.show(); } } @Override public void surfaceCreated(SurfaceHolder holder) { mSurfaceHolder = holder; openVideo(); } @Override public void surfaceDestroyed(SurfaceHolder holder) { mSurfaceHolder = null; if (mNativeContentVideoView != 0) { nativeExitFullscreen(mNativeContentVideoView, true); mNativeContentVideoView = 0; post(mExitFullscreenRunnable); } removeMediaController(); } public void setMediaController(MediaController controller) { if (mMediaController != null) { mMediaController.hide(); } mMediaController = controller; attachMediaController(); } private void attachMediaController() { if (mMediaController != null) { mMediaController.setMediaPlayer(this); mMediaController.setAnchorView(mVideoSurfaceView); mMediaController.setEnabled(false); } } @CalledByNative public void openVideo() { if (mSurfaceHolder != null) { mCurrentState = STATE_IDLE; setMediaController(new FullScreenMediaController(sDelegate.getContext(), this)); if (mNativeContentVideoView != 0) { nativeUpdateMediaMetadata(mNativeContentVideoView); } mCurrentBufferPercentage = 0; if (mNativeContentVideoView != 0) { nativeSetSurface(mNativeContentVideoView, mSurfaceHolder.getSurface()); } } } private void onCompletion() { mCurrentState = STATE_PLAYBACK_COMPLETED; if (mMediaController != null) { mMediaController.hide(); } } @Override public boolean onTouch(View v, MotionEvent event) { if (isInPlaybackState() && mMediaController != null && event.getAction() == MotionEvent.ACTION_DOWN) { toggleMediaControlsVisiblity(); } return true; } @Override public boolean onTrackballEvent(MotionEvent ev) { if (isInPlaybackState() && mMediaController != null) { toggleMediaControlsVisiblity(); } return false; } @Override public boolean onKey(View v, int keyCode, KeyEvent event) { boolean isKeyCodeSupported = keyCode != KeyEvent.KEYCODE_BACK && keyCode != KeyEvent.KEYCODE_VOLUME_UP && keyCode != KeyEvent.KEYCODE_VOLUME_DOWN && keyCode != KeyEvent.KEYCODE_VOLUME_MUTE && keyCode != KeyEvent.KEYCODE_CALL && keyCode != KeyEvent.KEYCODE_MENU && keyCode != KeyEvent.KEYCODE_SEARCH && keyCode != KeyEvent.KEYCODE_ENDCALL; if (isInPlaybackState() && isKeyCodeSupported && mMediaController != null) { if (keyCode == KeyEvent.KEYCODE_HEADSETHOOK || keyCode == KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE) { if (isPlaying()) { pause(); mMediaController.show(); } else { start(); mMediaController.hide(); } return true; } else if (keyCode == KeyEvent.KEYCODE_MEDIA_PLAY) { if (!isPlaying()) { start(); mMediaController.hide(); } return true; } else if (keyCode == KeyEvent.KEYCODE_MEDIA_STOP || keyCode == KeyEvent.KEYCODE_MEDIA_PAUSE) { if (isPlaying()) { pause(); mMediaController.show(); } return true; } else { toggleMediaControlsVisiblity(); } } else if (keyCode == KeyEvent.KEYCODE_BACK && event.getAction() == KeyEvent.ACTION_UP) { if (mNativeContentVideoView != 0) { nativeExitFullscreen(mNativeContentVideoView, false); destroyNativeView(); } return true; } else if (keyCode == KeyEvent.KEYCODE_MENU || keyCode == KeyEvent.KEYCODE_SEARCH) { return true; } return 
super.onKeyDown(keyCode, event); } private void toggleMediaControlsVisiblity() { if (mMediaController.isShowing()) { mMediaController.hide(); } else { mMediaController.show(); } } private boolean isInPlaybackState() { return (mCurrentState != STATE_ERROR && mCurrentState != STATE_IDLE); } public void start() { if (isInPlaybackState()) { if (mNativeContentVideoView != 0) { nativePlay(mNativeContentVideoView); } mCurrentState = STATE_PLAYING; } } public void pause() { if (isInPlaybackState()) { if (isPlaying()) { if (mNativeContentVideoView != 0) { nativePause(mNativeContentVideoView); } mCurrentState = STATE_PAUSED; } } } // cache duration as mDuration for faster access public int getDuration() { if (isInPlaybackState()) { if (mDuration > 0) { return mDuration; } if (mNativeContentVideoView != 0) { mDuration = nativeGetDurationInMilliSeconds(mNativeContentVideoView); } else { mDuration = 0; } return mDuration; } mDuration = -1; return mDuration; } public int getCurrentPosition() { if (isInPlaybackState() && mNativeContentVideoView != 0) { return nativeGetCurrentPosition(mNativeContentVideoView); } return 0; } public void seekTo(int msec) { if (mNativeContentVideoView != 0) { nativeSeekTo(mNativeContentVideoView, msec); } } public boolean isPlaying() { return mNativeContentVideoView != 0 && nativeIsPlaying(mNativeContentVideoView); } public int getBufferPercentage() { return mCurrentBufferPercentage; } public boolean canPause() { return mCanPause; } public boolean canSeekBackward() { return mCanSeekBack; } public boolean canSeekForward() { return mCanSeekForward; } public int getAudioSessionId() { return 0; } @CalledByNative public static ContentVideoView createContentVideoView(int nativeContentVideoView) { if (sContentVideoView != null) return sContentVideoView; if (sDelegate != null && sDelegate.getContext() != null) { sContentVideoView = new ContentVideoView(sDelegate.getContext(), nativeContentVideoView); sDelegate.onShowCustomView(sContentVideoView); sContentVideoView.setBackgroundColor(Color.BLACK); sContentVideoView.showContentVideoView(); sContentVideoView.setVisibility(View.VISIBLE); return sContentVideoView; } return null; } public void removeMediaController() { if (mMediaController != null) { mMediaController.setEnabled(false); mMediaController.hide(); mMediaController = null; } } public void removeSurfaceView() { removeView(mVideoSurfaceView); removeView(mProgressView); mVideoSurfaceView = null; mProgressView = null; } @CalledByNative public static void destroyContentVideoView() { sDelegate.onDestroyContentVideoView(); if (sContentVideoView != null) { sContentVideoView.removeMediaController(); sContentVideoView.removeSurfaceView(); sContentVideoView.setVisibility(View.GONE); } sContentVideoView = null; } public static ContentVideoView getContentVideoView() { return sContentVideoView; } public static void registerContentVideoViewContextDelegate( ContentVideoViewContextDelegate delegate) { sDelegate = delegate; } @Override public boolean onTouchEvent(MotionEvent ev) { return true; } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK && event.getAction() == KeyEvent.ACTION_UP) { destroyContentVideoView(); return true; } return super.onKeyDown(keyCode, event); } private native void nativeExitFullscreen(int nativeContentVideoView, boolean relaseMediaPlayer); private native int nativeGetCurrentPosition(int nativeContentVideoView); private native int nativeGetDurationInMilliSeconds(int nativeContentVideoView); private native void 
nativeUpdateMediaMetadata(int nativeContentVideoView); private native int nativeGetVideoWidth(int nativeContentVideoView); private native int nativeGetVideoHeight(int nativeContentVideoView); private native boolean nativeIsPlaying(int nativeContentVideoView); private native void nativePause(int nativeContentVideoView); private native void nativePlay(int nativeContentVideoView); private native void nativeSeekTo(int nativeContentVideoView, int msec); private native void nativeSetSurface(int nativeContentVideoView, Surface surface); }
package org.eclipse.imp.pdb.facts.io.binary; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; import java.util.Map.Entry; import org.eclipse.imp.pdb.facts.IBool; import org.eclipse.imp.pdb.facts.IConstructor; import org.eclipse.imp.pdb.facts.IDateTime; import org.eclipse.imp.pdb.facts.IInteger; import org.eclipse.imp.pdb.facts.IList; import org.eclipse.imp.pdb.facts.IListWriter; import org.eclipse.imp.pdb.facts.IMap; import org.eclipse.imp.pdb.facts.IMapWriter; import org.eclipse.imp.pdb.facts.INode; import org.eclipse.imp.pdb.facts.IRational; import org.eclipse.imp.pdb.facts.IReal; import org.eclipse.imp.pdb.facts.ISet; import org.eclipse.imp.pdb.facts.ISetWriter; import org.eclipse.imp.pdb.facts.ISourceLocation; import org.eclipse.imp.pdb.facts.IString; import org.eclipse.imp.pdb.facts.ITuple; import org.eclipse.imp.pdb.facts.IValue; import org.eclipse.imp.pdb.facts.IValueFactory; import org.eclipse.imp.pdb.facts.exceptions.FactParseError; import org.eclipse.imp.pdb.facts.type.Type; import org.eclipse.imp.pdb.facts.type.TypeFactory; import org.eclipse.imp.pdb.facts.type.TypeStore; import org.eclipse.imp.pdb.facts.util.ResizingArray; import org.eclipse.imp.pdb.facts.util.ShareableHashMap; // TODO Change this thing so it doesn't use recursion. /** * @author Arnold Lankamp */ public class BinaryReader{ private final static int DEFAULT_SHARED_VALUES_STORE_SIZE = 1024; private final static int DEFAULT_SHARED_TYPES_STORE_SIZE = 128; private final static int DEFAULT_SHARED_PATHS_STORE_SIZE = 128; private final static int DEFAULT_SHARED_NAMES_STORE_SIZE = 128; private final static int BOOL_HEADER = 0x01; private final static int INTEGER_HEADER = 0x02; private final static int BIG_INTEGER_HEADER = 0x03; // Special case of INTEGER_HEADER (flags for alternate encoding). 
private final static int DOUBLE_HEADER = 0x04; private final static int IEEE754_ENCODED_DOUBLE_HEADER = 0x14; private final static int STRING_HEADER = 0x05; private final static int SOURCE_LOCATION_HEADER = 0x06; private final static int DATE_TIME_HEADER = 0x10; private final static int TUPLE_HEADER = 0x07; private final static int NODE_HEADER = 0x08; private final static int ANNOTATED_NODE_HEADER = 0x09; private final static int CONSTRUCTOR_HEADER = 0x0a; private final static int ANNOTATED_CONSTRUCTOR_HEADER = 0x0b; private final static int LIST_HEADER = 0x0c; private final static int SET_HEADER = 0x0d; private final static int RELATION_HEADER = 0x0e; private final static int MAP_HEADER = 0x0f; private final static int RATIONAL_HEADER = 0x11; private final static int VALUE_TYPE_HEADER = 0x01; private final static int VOID_TYPE_HEADER = 0x02; private final static int BOOL_TYPE_HEADER = 0x03; private final static int INTEGER_TYPE_HEADER = 0x04; private final static int DOUBLE_TYPE_HEADER = 0x05; private final static int STRING_TYPE_HEADER = 0x06; private final static int SOURCE_LOCATION_TYPE_HEADER = 0x07; private final static int DATE_TIME_TYPE_HEADER = 0x14; private final static int NODE_TYPE_HEADER = 0x08; private final static int TUPLE_TYPE_HEADER = 0x09; private final static int LIST_TYPE_HEADER = 0x0a; private final static int SET_TYPE_HEADER = 0x0b; private final static int RELATION_TYPE_HEADER = 0x0c; private final static int MAP_TYPE_HEADER = 0x0d; private final static int PARAMETER_TYPE_HEADER = 0x0e; private final static int ADT_TYPE_HEADER = 0x0f; private final static int CONSTRUCTOR_TYPE_HEADER = 0x10; private final static int ALIAS_TYPE_HEADER = 0x11; private final static int ANNOTATED_NODE_TYPE_HEADER = 0x12; private final static int ANNOTATED_CONSTRUCTOR_TYPE_HEADER = 0x13; private final static int RATIONAL_TYPE_HEADER = 0x15; private final static int TYPE_MASK = 0x1f; private final static int SHARED_FLAG = 0x80; private final static int TYPE_SHARED_FLAG = 0x40; private final static int URL_SHARED_FLAG = 0x20; private final static int NAME_SHARED_FLAG = 0x20; private final static int HAS_FIELD_NAMES = 0x20; private final static int DATE_TIME_INDICATOR = 0x01; private final static int DATE_INDICATOR = 0x02; private final static TypeFactory tf = TypeFactory.getInstance(); private final ResizingArray<IValue> sharedValues; private int currentSharedValueId; private final ResizingArray<Type> sharedTypes; private int currentSharedTypeId; private final ResizingArray<URI> sharedPaths; private int currentSharedPathId; private final ResizingArray<String> sharedNames; private int currentSharedNamesId; private final IValueFactory valueFactory; private final TypeStore typeStore; private final InputStream in; public BinaryReader(IValueFactory valueFactory, TypeStore typeStore, InputStream inputStream){ super(); this.valueFactory = valueFactory; this.typeStore = typeStore; this.in = inputStream; sharedValues = new ResizingArray<>(DEFAULT_SHARED_VALUES_STORE_SIZE); currentSharedValueId = 0; sharedTypes = new ResizingArray<>(DEFAULT_SHARED_TYPES_STORE_SIZE); currentSharedTypeId = 0; sharedPaths = new ResizingArray<>(DEFAULT_SHARED_PATHS_STORE_SIZE); currentSharedPathId = 0; sharedNames = new ResizingArray<>(DEFAULT_SHARED_NAMES_STORE_SIZE); currentSharedNamesId = 0; } public IValue deserialize() throws IOException{ int header = read(); if((header & SHARED_FLAG) == SHARED_FLAG){ return sharedValues.get(parseInteger()); } IValue value; int valueType = header & TYPE_MASK; switch(valueType){ case 
BOOL_HEADER: value = readBool(); break; case INTEGER_HEADER: value = readInteger(); break; case BIG_INTEGER_HEADER: value = readBigInteger(); break; case DOUBLE_HEADER: value = readDouble(); break; case IEEE754_ENCODED_DOUBLE_HEADER: value = readIEEE754EncodedDouble(); break; case STRING_HEADER: value = readString(); break; case SOURCE_LOCATION_HEADER: value = readSourceLocation(header); break; case DATE_TIME_HEADER: value = readDateTime(); break; case TUPLE_HEADER: value = readTuple(); break; case NODE_HEADER: value = readNode(header); break; case ANNOTATED_NODE_HEADER: value = readAnnotatedNode(header); break; case CONSTRUCTOR_HEADER: value = readConstructor(header); break; case ANNOTATED_CONSTRUCTOR_HEADER: value = readAnnotatedConstructor(header); break; case LIST_HEADER: value = readList(header); break; case SET_HEADER: value = readSet(header); break; case RELATION_HEADER: value = readRelation(header); break; case MAP_HEADER: value = readMap(header); break; case RATIONAL_HEADER: value = readRational(); break; default: throw new RuntimeException("Unknow value type: "+valueType); } boolean hashValue = true; if (value.getType().isAbstractData()) { IConstructor consValue = (IConstructor)value; if (consValue.hasAnnotations()) { Map<String,IValue> amap = consValue.getAnnotations(); for (Entry<String, IValue> aEntry : amap.entrySet()) { Type aType = aEntry.getValue().getType(); if (!aType.equivalent(tf.voidType()) && aType.isSourceLocation()) { hashValue = false; break; } } } } if (hashValue) { sharedValues.set(value, currentSharedValueId++); } return value; } // Called by value stuff. private Type readType(int header) throws IOException{ if((header & TYPE_SHARED_FLAG) == TYPE_SHARED_FLAG){ return sharedTypes.get(parseInteger()); } return doReadType(read()); } // Called by type stuff. 
private Type doReadType() throws IOException{ return doReadType(read()); } private Type doReadType(int typeHeader) throws IOException{ if((typeHeader & SHARED_FLAG) == SHARED_FLAG){ return sharedTypes.get(parseInteger()); } Type type; int typeType = typeHeader & TYPE_MASK; switch(typeType){ case VALUE_TYPE_HEADER: type = readValueType(); break; case VOID_TYPE_HEADER: type = readVoidType(); break; case BOOL_TYPE_HEADER: type = readBoolType(); break; case INTEGER_TYPE_HEADER: type = readIntegerType(); break; case DOUBLE_TYPE_HEADER: type = readDoubleType(); break; case STRING_TYPE_HEADER: type = readStringType(); break; case SOURCE_LOCATION_TYPE_HEADER: type = readSourceLocationType(); break; case DATE_TIME_TYPE_HEADER: type = readDateTimeType(); break; case NODE_TYPE_HEADER: type = readNodeType(); break; case TUPLE_TYPE_HEADER: type = readTupleType(typeHeader); break; case LIST_TYPE_HEADER: type = readListType(); break; case SET_TYPE_HEADER: type = readSetType(); break; case RELATION_TYPE_HEADER: type = readRelationType(); break; case MAP_TYPE_HEADER: type = readMapType(typeHeader); break; case PARAMETER_TYPE_HEADER: type = readParameterType(); break; case ADT_TYPE_HEADER: type = readADTType(); break; case CONSTRUCTOR_TYPE_HEADER: type = readConstructorType(); break; case ALIAS_TYPE_HEADER: type = readAliasType(); break; case ANNOTATED_NODE_TYPE_HEADER: type = readAnnotatedNodeType(); break; case ANNOTATED_CONSTRUCTOR_TYPE_HEADER: type = readAnnotatedConstructorType(); break; case RATIONAL_TYPE_HEADER: type = readRationalType(); break; default: throw new RuntimeException("Unkown type type: "+typeType); } sharedTypes.set(type, currentSharedTypeId++); return type; } private IBool readBool() throws IOException{ int bool = read(); return valueFactory.bool(bool == 0 ? false : true); } private IInteger readInteger() throws IOException{ int integerValue = parseInteger(); return valueFactory.integer(integerValue); } private IInteger readBigInteger() throws IOException{ int length = parseInteger(); byte[] integerData = new byte[length]; read(integerData, 0, length); return valueFactory.integer(integerData); } private IRational readRational() throws IOException{ int length = parseInteger(); byte[] valueData = new byte[length]; read(valueData, 0, length); IInteger num = valueFactory.integer(valueData); length = parseInteger(); valueData = new byte[length]; read(valueData, 0, length); IInteger denom = valueFactory.integer(valueData); return valueFactory.rational(num, denom); } private IReal readDouble() throws IOException{ int length = parseInteger(); byte[] unscaledValueData = new byte[length]; read(unscaledValueData, 0, length); int scale = parseInteger(); return valueFactory.real(new BigDecimal(new BigInteger(unscaledValueData), scale).toString()); // The toString call kind of stinks. } private IReal readIEEE754EncodedDouble() throws IOException{ double theDouble = parseDouble(); return valueFactory.real(theDouble); // The toString call kind of stinks. 
} private IString readString() throws IOException{ int size = parseInteger(); byte[] data = new byte[size]; for(int i = 0; i< size; i++){ data[i] = (byte) read(); } return valueFactory.string(new String(data, BinaryWriter.CharEncoding)); } private ISourceLocation readSourceLocation(int header) throws IOException{ URI path; if((header & URL_SHARED_FLAG) == URL_SHARED_FLAG){ int path_id = parseInteger(); path = sharedPaths.get(path_id); }else{ int pathSize = parseInteger(); byte[] data = new byte[pathSize]; for(int i = 0; i< pathSize; i++){ data[i] = (byte) read(); } try{ path = new URI(new String(data, BinaryWriter.CharEncoding)); }catch(URISyntaxException e){ throw new FactParseError("Illegal URI", e); // Can't happen. } sharedPaths.set(path, currentSharedPathId++); } int offset = parseInteger(); int length = parseInteger(); int beginLine = parseInteger(); int endLine = parseInteger(); int beginCol = parseInteger(); int endCol = parseInteger(); if (offset < 0) { return valueFactory.sourceLocation(path); } if (beginLine < 0) { return valueFactory.sourceLocation(path, offset, length); } return valueFactory.sourceLocation(path, offset, length, beginLine, endLine, beginCol, endCol); } private IDateTime readDateTime() throws IOException{ int typeIndicator = read(); if(typeIndicator == DATE_TIME_INDICATOR){ int year = parseInteger(); int month = parseInteger(); int day = parseInteger(); int hour = parseInteger(); int minute = parseInteger(); int second = parseInteger(); int millisecond = parseInteger(); int timeZoneHourOffset = parseInteger(); int timeZoneMinuteOffset = parseInteger(); return valueFactory.datetime(year, month, day, hour, minute, second, millisecond, timeZoneHourOffset, timeZoneMinuteOffset); }else if(typeIndicator == DATE_INDICATOR){ int year = parseInteger(); int month = parseInteger(); int day = parseInteger(); return valueFactory.date(year, month, day); }else{ int hour = parseInteger(); int minute = parseInteger(); int second = parseInteger(); int millisecond = parseInteger(); int timeZoneHourOffset = parseInteger(); int timeZoneMinuteOffset = parseInteger(); return valueFactory.time(hour, minute, second, millisecond, timeZoneHourOffset, timeZoneMinuteOffset); } } private ITuple readTuple() throws IOException{ int arity = parseInteger(); IValue[] content = new IValue[arity]; for(int i = 0; i < arity; i++){ content[i] = deserialize(); } return valueFactory.tuple(content); } private INode readNode(int header) throws IOException{ String nodeName; if((header & NAME_SHARED_FLAG) == NAME_SHARED_FLAG){ nodeName = sharedNames.get(parseInteger()); }else{ int nodeNameLength = parseInteger(); byte[] data = new byte[nodeNameLength]; for(int i = 0; i < nodeNameLength; i++){ data[i] = (byte) read(); } nodeName = new String(data, BinaryWriter.CharEncoding); sharedNames.set(nodeName, currentSharedNamesId++); } int arity = parseInteger(); IValue[] content = new IValue[arity]; for(int i = 0; i < arity; i++){ content[i] = deserialize(); } return valueFactory.node(nodeName, content); } private INode readAnnotatedNode(int header) throws IOException{ String nodeName; if((header & NAME_SHARED_FLAG) == NAME_SHARED_FLAG){ nodeName = sharedNames.get(parseInteger()); }else{ int nodeNameLength = parseInteger(); byte[] data = new byte[nodeNameLength]; for(int i = 0; i < nodeNameLength; i++){ data[i] = (byte) read(); } nodeName = new String(data, BinaryWriter.CharEncoding); sharedNames.set(nodeName, currentSharedNamesId++); } int arity = parseInteger(); IValue[] content = new IValue[arity]; for(int i = 0; 
i < arity; i++){ content[i] = deserialize(); } int numberOfAnnotations = parseInteger(); ShareableHashMap<String, IValue> annotations = new ShareableHashMap<>(); for(int i = numberOfAnnotations - 1; i >= 0; i--){ int labelLength = parseInteger(); byte[] labelData = new byte[labelLength]; read(labelData); String label = new String(labelData, BinaryWriter.CharEncoding); IValue value = deserialize(); annotations.put(label, value); } INode node = valueFactory.node(nodeName, content); return node.setAnnotations(annotations); } private IConstructor readConstructor(int header) throws IOException{ Type constructorType = readType(header); int arity = parseInteger(); IValue[] content = new IValue[arity]; for(int i = 0; i < arity; i++){ content[i] = deserialize(); } return valueFactory.constructor(constructorType, content); } private IConstructor readAnnotatedConstructor(int header) throws IOException{ Type constructorType = readType(header); int arity = parseInteger(); IValue[] content = new IValue[arity]; for(int i = 0; i < arity; i++){ content[i] = deserialize(); } int numberOfAnnotations = parseInteger(); ShareableHashMap<String, IValue> annotations = new ShareableHashMap<>(); for(int i = numberOfAnnotations - 1; i >= 0; i--){ int labelLength = parseInteger(); byte[] labelData = new byte[labelLength]; read(labelData); String label = new String(labelData, BinaryWriter.CharEncoding); IValue value = deserialize(); annotations.put(label, value); } IConstructor constructor = valueFactory.constructor(constructorType, content); return constructor.setAnnotations(annotations); } private IList readList(int header) throws IOException{ Type elementType = readType(header); int length = parseInteger(); IListWriter listWriter = valueFactory.listWriter(elementType); for(int i = 0; i < length; i++){ listWriter.append(deserialize()); } return listWriter.done(); } private ISet readSet(int header) throws IOException{ Type elementType = readType(header); int length = parseInteger(); ISetWriter setWriter = valueFactory.setWriter(elementType); for(int i = 0; i < length; i++){ setWriter.insert(deserialize()); } return setWriter.done(); } private ISet readRelation(int header) throws IOException{ Type elementType = readType(header); int length = parseInteger(); ISetWriter relationWriter = valueFactory.relationWriter(elementType); for(int i = 0; i < length; i++){ relationWriter.insert(deserialize()); } return relationWriter.done(); } private IMap readMap(int header) throws IOException{ Type mapType = readType(header); int length = parseInteger(); IMapWriter mapWriter = valueFactory.mapWriter(mapType); for(int i = 0; i < length; i++){ IValue key = deserialize(); IValue value = deserialize(); mapWriter.put(key, value); } return mapWriter.done(); } private Type readValueType(){ return tf.valueType(); } private Type readVoidType(){ return tf.voidType(); } private Type readBoolType(){ return tf.boolType(); } private Type readIntegerType(){ return tf.integerType(); } private Type readRationalType(){ return tf.rationalType(); } private Type readDoubleType(){ return tf.realType(); } private Type readStringType(){ return tf.stringType(); } private Type readSourceLocationType(){ return tf.sourceLocationType(); } private Type readDateTimeType(){ return tf.dateTimeType(); } private Type readNodeType(){ return tf.nodeType(); } private Type readAnnotatedNodeType() throws IOException{ Type nodeType = tf.nodeType(); int nrOfAnnotations = parseInteger(); for(--nrOfAnnotations; nrOfAnnotations >= 0; nrOfAnnotations--){ int nrOfLabelBytes =
parseInteger(); byte[] labelBytes = new byte[nrOfLabelBytes]; read(labelBytes); String label = new String(labelBytes, BinaryWriter.CharEncoding); Type valueType = doReadType(); typeStore.declareAnnotation(nodeType, label, valueType); } return nodeType; } private Type readTupleType(int header) throws IOException{ boolean hasFieldNames = ((header & HAS_FIELD_NAMES) == HAS_FIELD_NAMES); if(hasFieldNames){ int arity = parseInteger(); Type[] fields = new Type[arity]; String[] fieldNames = new String[arity]; for(int i = 0; i < arity; i++){ fields[i] = doReadType(); int fieldNameLength = parseInteger(); byte[] fieldNameData = new byte[fieldNameLength]; read(fieldNameData); fieldNames[i] = new String(fieldNameData, BinaryWriter.CharEncoding); } return tf.tupleType(fields, fieldNames); } int arity = parseInteger(); Type[] fields = new Type[arity]; for(int i = 0; i < arity; i++){ fields[i] = doReadType(); } return tf.tupleType(fields); } private Type readListType() throws IOException{ Type elementType = doReadType(); return tf.listType(elementType); } private Type readSetType() throws IOException{ Type elementType = doReadType(); return tf.setType(elementType); } private Type readRelationType() throws IOException{ Type elementType = doReadType(); return tf.relTypeFromTuple(elementType); } private Type readMapType(int header) throws IOException{ boolean hasFieldNames = ((header & HAS_FIELD_NAMES) == HAS_FIELD_NAMES); if(hasFieldNames){ Type keyType = doReadType(); int keyLabelLength = parseInteger(); byte[] keyLabelData = new byte[keyLabelLength]; read(keyLabelData); String keyLabel = new String(keyLabelData, BinaryWriter.CharEncoding); Type valueType = doReadType(); int valueLabelLength = parseInteger(); byte[] valueLabelData = new byte[valueLabelLength]; read(valueLabelData); String valueLabel = new String(valueLabelData, BinaryWriter.CharEncoding); return tf.mapType(keyType, keyLabel, valueType, valueLabel); } else { Type keyType = doReadType(); Type valueType = doReadType(); return tf.mapType(keyType, valueType); } } private Type readParameterType() throws IOException{ int nameLength = parseInteger(); byte[] nameData = new byte[nameLength]; read(nameData); String name = new String(nameData, BinaryWriter.CharEncoding); Type bound = doReadType(); return tf.parameterType(name, bound); } private Type readADTType() throws IOException{ int nameLength = parseInteger(); byte[] nameData = new byte[nameLength]; read(nameData); String name = new String(nameData, BinaryWriter.CharEncoding); Type parameters = doReadType(); return tf.abstractDataTypeFromTuple(typeStore, name, parameters); } private Type readConstructorType() throws IOException{ int nameLength = parseInteger(); byte[] nameData = new byte[nameLength]; read(nameData); String name = new String(nameData, BinaryWriter.CharEncoding); Type fieldTypes = doReadType(); Type adtType = doReadType(); return tf.constructorFromTuple(typeStore, adtType, name, fieldTypes); } private Type readAnnotatedConstructorType() throws IOException{ int nameLength = parseInteger(); byte[] nameData = new byte[nameLength]; read(nameData); String name = new String(nameData, BinaryWriter.CharEncoding); Type fieldTypes = doReadType(); Type adtType = doReadType(); Type constructorType = tf.constructorFromTuple(typeStore, adtType, name, fieldTypes); int nrOfAnnotations = parseInteger(); for(--nrOfAnnotations; nrOfAnnotations >= 0; nrOfAnnotations--){ int nrOfLabelBytes = parseInteger(); byte[] labelBytes = new byte[nrOfLabelBytes]; read(labelBytes); String label = new
String(labelBytes, BinaryWriter.CharEncoding); Type valueType = doReadType(); typeStore.declareAnnotation(constructorType, label, valueType); } return constructorType; } private Type readAliasType() throws IOException{ int nameLength = parseInteger(); byte[] nameData = new byte[nameLength]; read(nameData); String name = new String(nameData, BinaryWriter.CharEncoding); Type aliasedType = doReadType(); Type parameters = doReadType(); return tf.aliasTypeFromTuple(typeStore, name, aliasedType, parameters); } private final static int SEVENBITS = 0x0000007f; private final static int SIGNBIT = 0x00000080; private int parseInteger() throws IOException{ int part = read(); int result = (part & SEVENBITS); if((part & SIGNBIT) == 0) return result; part = read(); result |= ((part & SEVENBITS) << 7); if((part & SIGNBIT) == 0) return result; part = read(); result |= ((part & SEVENBITS) << 14); if((part & SIGNBIT) == 0) return result; part = read(); result |= ((part & SEVENBITS) << 21); if((part & SIGNBIT) == 0) return result; part = read(); result |= ((part & SEVENBITS) << 28); return result; } private final static int BYTEMASK = 0x000000ff; private final static int BYTEBITS = 8; private final static int LONGBITS = 8; private double parseDouble() throws IOException{ long result = 0; for(int i = 0; i < LONGBITS; i++){ result |= ((((long) read()) & BYTEMASK) << (i * BYTEBITS)); } return Double.longBitsToDouble(result); } private int read() throws IOException { int b = in.read(); if(b == -1) { throw new UnexpectedEOF(); } return b; } private void read(byte[] buffer) throws IOException { read(buffer, 0, buffer.length); } private void read(byte[] buffer, int offset, int length) throws IOException { int read; while(length > 0) { read = in.read(buffer, offset, length); if(read == -1) { throw new UnexpectedEOF(); } length = length - read; offset = offset + read; } } static class UnexpectedEOF extends IOException { private static final long serialVersionUID = -907629554395808678L; } }
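The parseInteger routine in the BinaryReader above decodes a little-endian base-128 varint: seven payload bits per byte, with the high bit signalling that another byte follows. A small self-contained sketch of that same decoding logic, using an illustrative byte sequence rather than any value taken from a real PDB stream:

public class VarintDemo {
    // Mirrors BinaryReader.parseInteger: 7 payload bits per byte, MSB = "more bytes follow".
    static int decodeVarint(byte[] bytes) {
        int result = 0;
        int shift = 0;
        for (byte b : bytes) {
            result |= (b & 0x7f) << shift;
            if ((b & 0x80) == 0) {
                break; // continuation bit clear: this was the last byte
            }
            shift += 7;
        }
        return result;
    }

    public static void main(String[] args) {
        // 300 = 0b1_0010_1100: low seven bits 0x2c with the continuation bit set, then 0x02.
        System.out.println(decodeVarint(new byte[]{(byte) 0xac, 0x02})); // prints 300
    }
}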
package mil.nga.mapcache.view.map.grid;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;

import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Polygon;

import java.util.ArrayList;
import java.util.List;

import mil.nga.geopackage.BoundingBox;

/**
 * Creates labels that will be visible in the center of the grid.
 */
public class LabelMaker {

    /**
     * The grid model to update.
     */
    private GridModel gridModel;

    /**
     * Constructor.
     *
     * @param gridModel The grid model to update.
     */
    public LabelMaker(GridModel gridModel) {
        this.gridModel = gridModel;
    }

    /**
     * Creates the labels for each grid to be placed at the center of each grid.
     */
    public void createLabels() {
        List<MarkerOptions> labels = new ArrayList<>();
        for (Grid grid : gridModel.getGrids()) {
            if (grid.getText() != null) {
                Polygon box = grid.getBounds();
                double maxLat = -90;
                double maxLon = -180;
                double minLat = 90;
                double minLon = 180;
                for (Coordinate coord : box.getCoordinates()) {
                    if (coord.y > maxLat) {
                        maxLat = coord.y;
                    }
                    if (coord.y < minLat) {
                        minLat = coord.y;
                    }
                    if (coord.x > maxLon) {
                        maxLon = coord.x;
                    }
                    if (coord.x < minLon) {
                        minLon = coord.x;
                    }
                }
                double centerLat = (maxLat + minLat) / 2;
                double centerLon = (maxLon + minLon) / 2;
                MarkerOptions marker = new MarkerOptions();
                marker.position(new LatLng(centerLat, centerLon));
                BitmapDescriptor textIcon = createLabel(grid);
                marker.icon(textIcon);
                labels.add(marker);
            }
        }
        if (!labels.isEmpty()) {
            MarkerOptions[] newLabels = labels.toArray(new MarkerOptions[0]);
            gridModel.setLabels(newLabels);
        }
    }

    /**
     * Create a bitmap containing the text to be used for the marker.
     *
     * @param grid The grid to put a label for.
     * @return The marker's text image.
     */
    private BitmapDescriptor createLabel(Grid grid) {
        Paint textPaint = new Paint();
        textPaint.setTextSize(20);
        textPaint.setColor(grid.getColor());

        float textWidth = textPaint.measureText(grid.getText());
        float textHeight = textPaint.getTextSize();
        int width = (int) (textWidth);
        int height = (int) (textHeight);

        Bitmap image = Bitmap.createBitmap(width, height + 15, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(image);
        canvas.translate(0, height);
        canvas.drawText(grid.getText(), 0, 0, textPaint);

        BitmapDescriptor icon = BitmapDescriptorFactory.fromBitmap(image);
        return icon;
    }
}
package com.anwios.android.views; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Rect; import android.os.Parcel; import android.os.Parcelable; import android.util.AttributeSet; import android.util.DisplayMetrics; import android.view.MotionEvent; import android.view.View; public class TimePickView extends View { // Constants private static final int DEFAULT_SIZE =300; // Fields private Paint p = new Paint(Paint.ANTI_ALIAS_FLAG); private Paint markerTextPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private double hourAngle = 0; private double minuteAngle = 0; private boolean isSettingHour = true; private boolean isSettingMinute = false; private boolean autoSetMinuteAfterHour = false; private onTimeSetListener listener; private boolean showMarkers = true; private float markerSize; private int markerColor; private float markerWidth; private boolean showMarkerText = true; private float markerTextSize; private int markerTextColor; private boolean showCenterPoint; private float centerPointSize; private int centerPointColor; private boolean moveHourHandOnMinute = true; private float minuteHandWidth; private float hourHandWidth; private int hourHandColor; private int minuteHandColor; private float textSize; private int textColor; private int textPaddingTop; private String separator = ":"; private boolean canSetTime = true; private int backgroundColor; // Constructors public TimePickView(Context context) { super(context); init(context, null); } public TimePickView(Context context, AttributeSet attrs) { super(context, attrs); init(context, attrs); } public TimePickView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(context, attrs); } // Getter & Setter public void setTime(int hour, int minute) { setHourAngle(hour); setMinuteAngle(minute); invalidate(); } public void setTime() { autoSetMinuteAfterHour = true; isSettingMinute = false; isSettingHour = true; canSetTime=true; } public int getHour() { return angleToHour(); } public void setHour(int hour) { setHourAngle(hour); invalidate(); } public void setHour() { //set hour only autoSetMinuteAfterHour = false; canSetTime=true; isSettingMinute = false; isSettingHour = true; } public int getMinute() { return angleToMinute(); } public void setMinute(int minute) { setMinuteAngle(minute); invalidate(); } public void setMinute() { isSettingHour = false; isSettingMinute = true; canSetTime=true; } public boolean isMoveHourhandOnMinute() { return moveHourHandOnMinute; } public void setMoveHourhandOnMinute(boolean moveHourhandOnMinute) { this.moveHourHandOnMinute = moveHourhandOnMinute; } public float getHourMarkerSize() { return markerSize; } public void setHourMarkerSize(float hourMarkerSize) { this.markerSize = hourMarkerSize; invalidate(); } public boolean isSettingHour() { return isSettingHour; } public boolean isSettingMinute() { return isSettingMinute; } public String getSeparator() { return separator; } public void setSeparator(String separator) { this.separator = separator; invalidate(); } public int getTextPaddingTop() { return textPaddingTop; } public void setTextPaddingTop(int textPaddingTop) { this.textPaddingTop = textPaddingTop; invalidate(); } public int getTextColor() { return textColor; } public void setTextColor(int textColor) { this.textColor = textColor; invalidate(); } public float getTextSize() { return textSize; } public void setTextSize(float textSize) 
{ this.textSize = textSize; invalidate(); } public int getMinuteHandColor() { return minuteHandColor; } public void setMinuteHandColor(int minuteHandColor) { this.minuteHandColor = minuteHandColor; invalidate(); } public int getHourHandColor() { return hourHandColor; } public void setHourHandColor(int hourHandColor) { this.hourHandColor = hourHandColor; invalidate(); } public float getHourHandWidth() { return hourHandWidth; } public void setHourHandWidth(float hourHandWidth) { this.hourHandWidth = hourHandWidth; invalidate(); } public float getMinuteHandWidth() { return minuteHandWidth; } public void setMinuteHandWidth(float minuteHandWidth) { this.minuteHandWidth = minuteHandWidth; invalidate(); } public int getCenterPointColor() { return centerPointColor; } public void setCenterPointColor(int centerPointColor) { this.centerPointColor = centerPointColor; invalidate(); } public float getCenterPointSize() { return centerPointSize; } public void setCenterPointSize(float centerPointSize) { this.centerPointSize = centerPointSize; invalidate(); } public void setShowCenterPoint(boolean showCenterPoint) { this.showCenterPoint = showCenterPoint; invalidate(); } public int getMarkerTextColor() { return markerTextColor; } public void setMarkerTextColor(int markerTextColor) { this.markerTextColor = markerTextColor; invalidate(); } public float getMarkerTextSize() { return markerTextSize; } public void setMarkerTextSize(float markerTextSize) { this.markerTextSize = markerTextSize; invalidate(); } public boolean isShowMarkerText() { return showMarkerText; } public void setShowMarkerText(boolean showHourText) { this.showMarkerText = showHourText; invalidate(); } public float getMarkerWidth() { return markerWidth; } public void setMarkerWidth(float markerWidth) { this.markerWidth = markerWidth; invalidate(); } public boolean isShowMarkers() { return showMarkers; } public void setShowMarkers(boolean showHourMarker) { this.showMarkers = showHourMarker; invalidate(); } public boolean isAutoSetMinuteAfterHour() { return autoSetMinuteAfterHour; } public void setAutoSetMinuteAfterHour(boolean autoSetMinuteAfterHour) { this.autoSetMinuteAfterHour = autoSetMinuteAfterHour; } public boolean CanSetTime() { return canSetTime; } public void setCanSetTime(boolean canSetTime) { this.canSetTime = canSetTime; } public void setOnTimeSetListener(onTimeSetListener listener) { this.listener = listener; } public int getHourMarkerColor() { return markerColor; } public void setHourMarkerColor(int hourMarkerColor) { this.markerColor = hourMarkerColor; } // Methods for/from SuperClass/Interfaces @Override protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) { final int widthMode = MeasureSpec.getMode(widthMeasureSpec); final int heightMode = MeasureSpec.getMode(heightMeasureSpec); final int widthSize = MeasureSpec.getSize(widthMeasureSpec); final int heightSize = MeasureSpec.getSize(heightMeasureSpec); final int chosenWidth = getDimension(widthMode, widthSize); final int chosenHeight = getDimension(heightMode, heightSize); setMeasuredDimension(chosenWidth, chosenHeight); } @Override protected void onDraw(Canvas canvas) { float drawableWidth = getWidth(); float drawableHeight = getHeight(); float radius = (Math.min(drawableWidth, drawableHeight)) / 2; float halfWidth = drawableWidth / 2; float halfHeight = drawableHeight / 2; float padding = dpToPx(4); //background p.setColor(backgroundColor); canvas.drawCircle(halfWidth, halfHeight, radius, p); radius -= padding; p.setStrokeWidth(markerWidth); 
p.setColor(Color.parseColor("#3F51B5")); //draw markers and Text if (showMarkers || showMarkerText) { markerTextPaint.setColor(markerTextColor); markerTextPaint.setTextSize(markerTextSize); int hour = 1; for (int i = 0; i < 360; i = i + 30) { canvas.rotate(30, halfWidth, halfHeight); if (showMarkers) { p.setColor(markerColor); canvas.drawLine(halfWidth, halfHeight - radius + markerSize, halfWidth, halfHeight - radius, p); } if (showMarkerText) { String text = String.valueOf(hour++); Rect bounds = new Rect(); markerTextPaint.getTextBounds(text, 0, text.length(), bounds); float textHeight = bounds.height(); int width = bounds.width(); canvas.drawText(text, halfWidth - width / 2, halfHeight - radius + markerSize + textHeight + padding, markerTextPaint); } } } //draw text p.setColor(textColor); StringBuilder text = new StringBuilder(); text.append(angleToHour()); text.append(separator); text.append(angleToMinute()); Rect bounds = new Rect(); p.setTextSize(textSize); p.getTextBounds(text.toString(), 0, text.length(), bounds); int textWidth = bounds.width(); canvas.drawText(text.toString(), halfWidth - textWidth / 2, halfHeight + textPaddingTop + textSize, p); //draw Hands p.setColor(minuteHandColor); canvas.rotate((float) minuteAngle, halfWidth, halfHeight); p.setStrokeWidth(minuteHandWidth); canvas.drawLine(halfWidth, halfHeight - dpToPx(6) - centerPointSize, halfWidth, halfHeight - radius, p); canvas.rotate(-(float) minuteAngle, halfWidth, halfHeight); p.setColor(hourHandColor); canvas.rotate((float) hourAngle, halfWidth, halfHeight); p.setStrokeWidth(hourHandWidth); canvas.drawLine(halfWidth, halfHeight - dpToPx(6) - centerPointSize, halfWidth, halfHeight - ((2 * radius / 3)), p); canvas.rotate(-(float) hourAngle, halfWidth, halfHeight); //draw center point if (showCenterPoint) { p.setColor(centerPointColor); canvas.drawCircle(halfWidth, halfHeight, centerPointSize, p); } super.onDraw(canvas); } @Override public Parcelable onSaveInstanceState() { Parcelable superState = super.onSaveInstanceState(); SavedState state = new SavedState(superState); state.hourAngle = this.hourAngle; state.minuteAngle = this.minuteAngle; return state; } @Override public void onRestoreInstanceState(Parcelable state) { if (!(state instanceof SavedState)) { super.onRestoreInstanceState(state); return; } SavedState ss = (SavedState) state; super.onRestoreInstanceState(ss.getSuperState()); this.hourAngle = ss.hourAngle; this.minuteAngle = ss.minuteAngle; } // Methods private void init(Context context, AttributeSet attrs) { TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.TimePickView); showMarkers = typedArray.getBoolean(R.styleable.TimePickView_tpv_showMarkers, true); markerSize = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_markerSize, getResources().getDimensionPixelSize(R.dimen.tpv_marker_size)); markerColor = typedArray.getColor(R.styleable.TimePickView_tpv_markerColor, Color.parseColor("#3F51B5")); markerWidth = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_markerWidth, getResources().getDimensionPixelSize(R.dimen.tpv_marker_width)); showMarkerText = typedArray.getBoolean(R.styleable.TimePickView_tpv_showMarkerText, true); markerTextSize = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_markerTextSize, getResources().getDimensionPixelSize(R.dimen.tpv_marker_text_size)); markerTextColor = typedArray.getColor(R.styleable.TimePickView_tpv_markerTextColor, Color.parseColor("#3F51B5")); showCenterPoint = 
typedArray.getBoolean(R.styleable.TimePickView_tpv_showCenterPoint, true); centerPointSize = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_centerPointSize, getResources().getDimensionPixelSize(R.dimen.tpv_center_point_size)); centerPointColor = typedArray.getColor(R.styleable.TimePickView_tpv_centerPointColor, Color.parseColor("#3F51B5")); minuteHandWidth = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_minuteHandWidth, getResources().getDimensionPixelSize(R.dimen.tpv_minute_hand_width)); hourHandWidth = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_hourHandWidth, getResources().getDimensionPixelSize(R.dimen.tpv_hour_hand_width)); hourHandColor = typedArray.getColor(R.styleable.TimePickView_tpv_hourHandColor, Color.parseColor("#B71C1C")); minuteHandColor = typedArray.getColor(R.styleable.TimePickView_tpv_minuteHandColor, Color.parseColor("#B71C1C")); textSize = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_textSize, getResources().getDimensionPixelSize(R.dimen.tpv_text_size)); textColor = typedArray.getColor(R.styleable.TimePickView_tpv_textColor, Color.parseColor("#3F51B5")); textPaddingTop = typedArray.getDimensionPixelSize(R.styleable.TimePickView_tpv_textPaddingTop, getResources().getDimensionPixelSize(R.dimen.tpv_text_padding)); backgroundColor = typedArray.getColor(R.styleable.TimePickView_tpv_backgroundColor, Color.parseColor("#FFFFFF")); canSetTime = typedArray.getBoolean(R.styleable.TimePickView_tpv_canSetTime, true); } private int getDimension(final int mode, final int size) { switch (mode) { case MeasureSpec.AT_MOST: case MeasureSpec.EXACTLY: return size; case MeasureSpec.UNSPECIFIED: default: return DEFAULT_SIZE; } } private int dpToPx(int dp) { DisplayMetrics displayMetrics = getContext().getResources().getDisplayMetrics(); int px = Math.round(dp * (displayMetrics.xdpi / DisplayMetrics.DENSITY_DEFAULT)); return px; } @Override public boolean onTouchEvent(MotionEvent event) { if(canSetTime) { switch (event.getAction()) { case MotionEvent.ACTION_DOWN: //call listener (beforeTimeChanged) if (!(autoSetMinuteAfterHour && isSettingMinute)) { beforeSetTime(); } break; case MotionEvent.ACTION_MOVE: float x = event.getX(); float y = event.getY(); float centerX = getWidth() / 2; float centerY = getHeight() / 2; //find angle double angle1 = Math.atan2((y - centerY), (x - centerX)); double angle2 = Math.atan2((0 - centerY), 0); double angle = Math.toDegrees(angle1 - angle2); angle = (angle < 0) ? 360 + angle : angle; //set angle if (isSettingHour) { hourAngle = angle; } else if (isSettingMinute) { minuteAngle = angle; if (moveHourHandOnMinute) { hourAngle = ((int) (hourAngle / 30)) * 30 + (angleToMinute() / 60f) * 30f; } } //call listener onSetTime(); break; case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: if (autoSetMinuteAfterHour) { if (isSettingHour) { isSettingHour = false; isSettingMinute = true; } else if (isSettingMinute) { isSettingMinute = false; canSetTime=false; //call listener afterSetTime(); } } else { //call listener afterSetTime(); } break; } invalidate(); } return true; } private int angleToHour() { int hour = (int) (hourAngle / 30); hour = (hour == 0) ? 12 : hour; return hour; } private int angleToMinute() { int minute = (int) (minuteAngle / 6); return (minute); } private void setHourAngle(int hour) { hour = hour == 12 ? 
0 : hour; this.hourAngle = hour * 30; } private void setMinuteAngle(int minute) { this.minuteAngle = minute * 6; if (moveHourHandOnMinute) { hourAngle = ((int) (hourAngle / 30)) * 30 + (minute / 60f) * 30f; } } public interface onTimeSetListener { public void beforeTimeChanged(int hour, int minute); public void onTimeChanged(int hour, int minute); public void afterTimeChanged(int hour, int minute); } public void beforeSetTime() { if (listener != null) { listener.beforeTimeChanged(angleToHour(), angleToMinute()); } } public void onSetTime() { if (listener != null) { listener.onTimeChanged(angleToHour(), angleToMinute()); } } private void afterSetTime() { if (listener != null) { listener.afterTimeChanged(angleToHour(), angleToMinute()); } } public static class SavedState extends BaseSavedState { private double hourAngle; private double minuteAngle; SavedState(Parcelable superState) { super(superState); } private SavedState(Parcel in) { super(in); this.hourAngle = in.readDouble(); this.minuteAngle = in.readDouble(); } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeDouble(this.hourAngle); out.writeDouble(this.minuteAngle); } public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() { public SavedState createFromParcel(Parcel in) { return new SavedState(in); } public SavedState[] newArray(int size) { return new SavedState[size]; } }; } }
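/*
 * Illustrative usage sketch, not part of the original view class above. It assumes a caller in
 * the same package that already holds a TimePickView instance (e.g. obtained via findViewById);
 * the class and method names below are hypothetical, but every TimePickView call is taken from
 * the API defined above.
 */
class TimePickViewUsageSketch {

    void bind(TimePickView timePickView) {
        // pick the hour first, then the minute, within one drag sequence
        timePickView.setAutoSetMinuteAfterHour(true);
        // pre-select 9:30 before the user interacts with the view
        timePickView.setTime(9, 30);
        timePickView.setOnTimeSetListener(new TimePickView.onTimeSetListener() {
            @Override
            public void beforeTimeChanged(int hour, int minute) {
                // e.g. disable a "save" button while the hands are being moved
            }

            @Override
            public void onTimeChanged(int hour, int minute) {
                // live preview while the user drags a hand
            }

            @Override
            public void afterTimeChanged(int hour, int minute) {
                // persist the chosen hour and minute
            }
        });
    }
}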
package org.jamocha.dn.memory.javaimpl;

import java.util.ArrayList;
import java.util.List;

import lombok.ToString;

import org.jamocha.dn.memory.MemoryHandler;
import org.jamocha.dn.memory.Template;
import org.jamocha.dn.nodes.SlotInFactAddress;
import org.jamocha.filter.Filter.FilterElement;

@ToString(callSuper = true, exclude = "originatingMainHandler")
public abstract class MemoryHandlerTemp extends MemoryHandlerBase implements
        org.jamocha.dn.memory.MemoryHandlerTemp {

    final MemoryHandlerMain originatingMainHandler;

    protected MemoryHandlerTemp(final Template[] template,
            final MemoryHandlerMain originatingMainHandler, final List<Fact[]> facts) {
        super(template, facts);
        this.originatingMainHandler = originatingMainHandler;
    }

    protected MemoryHandlerTemp(final MemoryHandlerMain originatingMainHandler,
            final List<Fact[]> facts) {
        this(originatingMainHandler.getTemplate(), originatingMainHandler, facts);
    }

    @Override
    public List<MemoryHandler> splitIntoChunksOfSize(final int size) {
        final List<MemoryHandler> memoryHandlers = new ArrayList<>();
        if (size >= this.size()) {
            memoryHandlers.add(this);
            return memoryHandlers;
        }
        final Template[] template = this.getTemplate();
        final int max = this.size();
        int current = 0;
        while (current < max) {
            final List<Fact[]> facts = new ArrayList<>();
            for (int i = 0; i < size && current + i < max; ++i) {
                facts.add(this.facts.get(current + i));
            }
            memoryHandlers.add(new MemoryHandlerBase(template, facts));
            current += size;
        }
        return memoryHandlers;
    }

    protected static boolean applyFilterElement(final Fact fact, final FilterElement element) {
        // determine parameters
        final SlotInFactAddress addresses[] = element.getAddressesInTarget();
        final int paramLength = addresses.length;
        final Object params[] = new Object[paramLength];
        for (int i = 0; i < paramLength; ++i) {
            final SlotInFactAddress address = addresses[i];
            params[i] = fact.getValue(address.getSlotAddress());
        }
        // check filter
        return element.getFunction().evaluate(params);
    }
}
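/*
 * Illustrative sketch, not part of the Jamocha sources above: it restates the windowing logic of
 * splitIntoChunksOfSize() on a plain java.util.List so the algorithm can be read without the
 * MemoryHandler types. The class and method names are hypothetical.
 */
class ChunkingSketch {

    static <T> java.util.List<java.util.List<T>> splitIntoChunksOfSize(final java.util.List<T> rows, final int size) {
        final java.util.List<java.util.List<T>> chunks = new java.util.ArrayList<>();
        // small enough to stay in one piece: mirrors the early-out in the handler above
        if (size >= rows.size()) {
            chunks.add(rows);
            return chunks;
        }
        int current = 0;
        while (current < rows.size()) {
            // copy the next window of at most `size` rows
            final java.util.List<T> chunk = new java.util.ArrayList<>();
            for (int i = 0; i < size && current + i < rows.size(); ++i) {
                chunk.add(rows.get(current + i));
            }
            chunks.add(chunk);
            current += size;
        }
        return chunks;
    }
}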
package de.danoeh.antennapod.core.service.download; import android.annotation.SuppressLint; import android.app.Notification; import android.app.NotificationManager; import android.app.Service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.media.MediaMetadataRetriever; import android.os.Binder; import android.os.Handler; import android.os.IBinder; import android.support.v4.app.NotificationCompat; import android.support.v4.util.Pair; import android.util.Log; import android.webkit.URLUtil; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.http.HttpStatus; import org.xml.sax.SAXException; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import javax.xml.parsers.ParserConfigurationException; import de.danoeh.antennapod.core.ClientConfig; import de.danoeh.antennapod.core.R; import de.danoeh.antennapod.core.event.DownloadEvent; import de.danoeh.antennapod.core.event.FeedItemEvent; import de.danoeh.antennapod.core.feed.Feed; import de.danoeh.antennapod.core.feed.FeedImage; import de.danoeh.antennapod.core.feed.FeedItem; import de.danoeh.antennapod.core.feed.FeedMedia; import de.danoeh.antennapod.core.feed.FeedPreferences; import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction; import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction.Action; import de.danoeh.antennapod.core.preferences.GpodnetPreferences; import de.danoeh.antennapod.core.preferences.UserPreferences; import de.danoeh.antennapod.core.storage.DBReader; import de.danoeh.antennapod.core.storage.DBTasks; import de.danoeh.antennapod.core.storage.DBWriter; import de.danoeh.antennapod.core.storage.DownloadRequestException; import de.danoeh.antennapod.core.storage.DownloadRequester; import de.danoeh.antennapod.core.syndication.handler.FeedHandler; import de.danoeh.antennapod.core.syndication.handler.FeedHandlerResult; import de.danoeh.antennapod.core.syndication.handler.UnsupportedFeedtypeException; import de.danoeh.antennapod.core.util.ChapterUtils; import de.danoeh.antennapod.core.util.DownloadError; import de.danoeh.antennapod.core.util.InvalidFeedException; import de.greenrobot.event.EventBus; /** * Manages the download of feedfiles in the app. Downloads can be enqueued viathe startService intent. * The argument of the intent is an instance of DownloadRequest in the EXTRA_REQUEST field of * the intent. 
* After the downloads have finished, the downloaded object will be passed on to a specific handler, depending on the * type of the feedfile. */ public class DownloadService extends Service { private static final String TAG = "DownloadService"; /** * Cancels one download. The intent MUST have an EXTRA_DOWNLOAD_URL extra that contains the download URL of the * object whose download should be cancelled. */ public static final String ACTION_CANCEL_DOWNLOAD = "action.de.danoeh.antennapod.core.service.cancelDownload"; /** * Cancels all running downloads. */ public static final String ACTION_CANCEL_ALL_DOWNLOADS = "action.de.danoeh.antennapod.core.service.cancelAllDownloads"; /** * Extra for ACTION_CANCEL_DOWNLOAD */ public static final String EXTRA_DOWNLOAD_URL = "downloadUrl"; /** * Extra for ACTION_ENQUEUE_DOWNLOAD intent. */ public static final String EXTRA_REQUEST = "request"; /** * Contains all completed downloads that have not been included in the report yet. */ private List<DownloadStatus> reportQueue; private ExecutorService syncExecutor; private CompletionService<Downloader> downloadExecutor; private FeedSyncThread feedSyncThread; /** * Number of threads of downloadExecutor. */ private static final int NUM_PARALLEL_DOWNLOADS = 6; private DownloadRequester requester; private NotificationCompat.Builder notificationCompatBuilder; private int NOTIFICATION_ID = 2; private int REPORT_ID = 3; /** * Currently running downloads. */ private List<Downloader> downloads; /** * Number of running downloads. */ private AtomicInteger numberOfDownloads; /** * True if service is running. */ public static boolean isRunning = false; private Handler handler; private NotificationUpdater notificationUpdater; private ScheduledFuture notificationUpdaterFuture; private static final int SCHED_EX_POOL_SIZE = 1; private ScheduledThreadPoolExecutor schedExecutor; private Handler postHandler = new Handler(); private final IBinder mBinder = new LocalBinder(); public class LocalBinder extends Binder { public DownloadService getService() { return DownloadService.this; } } private Thread downloadCompletionThread = new Thread() { private static final String TAG = "downloadCompletionThd"; @Override public void run() { Log.d(TAG, "downloadCompletionThread was started"); while (!isInterrupted()) { try { Downloader downloader = downloadExecutor.take().get(); Log.d(TAG, "Received 'Download Complete' - message."); removeDownload(downloader); DownloadStatus status = downloader.getResult(); boolean successful = status.isSuccessful(); final int type = status.getFeedfileType(); if (successful) { if (type == Feed.FEEDFILETYPE_FEED) { handleCompletedFeedDownload(downloader.getDownloadRequest()); } else if (type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { handleCompletedFeedMediaDownload(status, downloader.getDownloadRequest()); } } else { numberOfDownloads.decrementAndGet(); if (!status.isCancelled()) { if (status.getReason() == DownloadError.ERROR_UNAUTHORIZED) { postAuthenticationNotification(downloader.getDownloadRequest()); } else if (status.getReason() == DownloadError.ERROR_HTTP_DATA_ERROR && Integer.valueOf(status.getReasonDetailed()) == HttpStatus.SC_REQUESTED_RANGE_NOT_SATISFIABLE) { Log.d(TAG, "Requested invalid range, restarting download from the beginning"); FileUtils.deleteQuietly(new File(downloader.getDownloadRequest().getDestination())); DownloadRequester.getInstance().download(DownloadService.this, downloader.getDownloadRequest()); } else { Log.e(TAG, "Download failed"); saveDownloadStatus(status); 
handleFailedDownload(status, downloader.getDownloadRequest()); // to make lists reload the failed item, we fake an item update if(type == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { long id = status.getFeedfileId(); FeedMedia media = DBReader.getFeedMedia(id); EventBus.getDefault().post(FeedItemEvent.updated(media.getItem())); } } } else { // if FeedMedia download has been canceled, fake FeedItem update // so that lists reload that it if(status.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { FeedMedia media = DBReader.getFeedMedia(status.getFeedfileId()); EventBus.getDefault().post(FeedItemEvent.updated(media.getItem())); } } queryDownloadsAsync(); } } catch (InterruptedException e) { Log.d(TAG, "DownloadCompletionThread was interrupted"); } catch (ExecutionException e) { e.printStackTrace(); numberOfDownloads.decrementAndGet(); } } Log.d(TAG, "End of downloadCompletionThread"); } }; @Override public int onStartCommand(Intent intent, int flags, int startId) { if (intent.getParcelableExtra(EXTRA_REQUEST) != null) { onDownloadQueued(intent); } else if (numberOfDownloads.get() == 0) { stopSelf(); } return Service.START_NOT_STICKY; } @SuppressLint("NewApi") @Override public void onCreate() { Log.d(TAG, "Service started"); isRunning = true; handler = new Handler(); reportQueue = Collections.synchronizedList(new ArrayList<DownloadStatus>()); downloads = Collections.synchronizedList(new ArrayList<Downloader>()); numberOfDownloads = new AtomicInteger(0); IntentFilter cancelDownloadReceiverFilter = new IntentFilter(); cancelDownloadReceiverFilter.addAction(ACTION_CANCEL_ALL_DOWNLOADS); cancelDownloadReceiverFilter.addAction(ACTION_CANCEL_DOWNLOAD); registerReceiver(cancelDownloadReceiver, cancelDownloadReceiverFilter); syncExecutor = Executors.newSingleThreadExecutor(new ThreadFactory() { @Override public Thread newThread(Runnable r) { Thread t = new Thread(r); t.setPriority(Thread.MIN_PRIORITY); return t; } }); Log.d(TAG, "parallel downloads: " + UserPreferences.getParallelDownloads()); downloadExecutor = new ExecutorCompletionService<Downloader>( Executors.newFixedThreadPool(UserPreferences.getParallelDownloads(), new ThreadFactory() { @Override public Thread newThread(Runnable r) { Thread t = new Thread(r); t.setPriority(Thread.MIN_PRIORITY); return t; } } ) ); schedExecutor = new ScheduledThreadPoolExecutor(SCHED_EX_POOL_SIZE, new ThreadFactory() { @Override public Thread newThread(Runnable r) { Thread t = new Thread(r); t.setPriority(Thread.MIN_PRIORITY); return t; } }, new RejectedExecutionHandler() { @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { Log.w(TAG, "SchedEx rejected submission of new task"); } } ); downloadCompletionThread.start(); feedSyncThread = new FeedSyncThread(); feedSyncThread.start(); setupNotificationBuilders(); requester = DownloadRequester.getInstance(); } @Override public IBinder onBind(Intent intent) { return mBinder; } @Override public void onDestroy() { Log.d(TAG, "Service shutting down"); isRunning = false; if (ClientConfig.downloadServiceCallbacks.shouldCreateReport() && UserPreferences.showDownloadReport()) { updateReport(); } postHandler.removeCallbacks(postDownloaderTask); EventBus.getDefault().postSticky(DownloadEvent.refresh(Collections.emptyList())); stopForeground(true); NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); nm.cancel(NOTIFICATION_ID); downloadCompletionThread.interrupt(); syncExecutor.shutdown(); schedExecutor.shutdown(); feedSyncThread.shutdown(); 
cancelNotificationUpdater(); unregisterReceiver(cancelDownloadReceiver); // start auto download in case anything new has shown up DBTasks.autodownloadUndownloadedItems(getApplicationContext()); } private void setupNotificationBuilders() { Bitmap icon = BitmapFactory.decodeResource(getResources(), R.drawable.stat_notify_sync); notificationCompatBuilder = new NotificationCompat.Builder(this) .setOngoing(true) .setContentIntent(ClientConfig.downloadServiceCallbacks.getNotificationContentIntent(this)) .setLargeIcon(icon) .setSmallIcon(R.drawable.stat_notify_sync); Log.d(TAG, "Notification set up"); } /** * Updates the contents of the service's notifications. Should be called * before setupNotificationBuilders. */ private Notification updateNotifications() { String contentTitle = getString(R.string.download_notification_title); int numDownloads = requester.getNumberOfDownloads(); String downloadsLeft; if (numDownloads > 0) { downloadsLeft = requester.getNumberOfDownloads() + getString(R.string.downloads_left); } else { downloadsLeft = getString(R.string.downloads_processing); } if (notificationCompatBuilder != null) { StringBuilder bigText = new StringBuilder(""); for (int i = 0; i < downloads.size(); i++) { Downloader downloader = downloads.get(i); final DownloadRequest request = downloader .getDownloadRequest(); if (request.getFeedfileType() == Feed.FEEDFILETYPE_FEED) { if (request.getTitle() != null) { if (i > 0) { bigText.append("\n"); } bigText.append("\u2022 " + request.getTitle()); } } else if (request.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { if (request.getTitle() != null) { if (i > 0) { bigText.append("\n"); } bigText.append("\u2022 " + request.getTitle() + " (" + request.getProgressPercent() + "%)"); } } } notificationCompatBuilder.setContentTitle(contentTitle); notificationCompatBuilder.setContentText(downloadsLeft); if (bigText != null) { notificationCompatBuilder.setStyle(new NotificationCompat.BigTextStyle().bigText(bigText.toString())); } return notificationCompatBuilder.build(); } return null; } private Downloader getDownloader(String downloadUrl) { for (Downloader downloader : downloads) { if (downloader.getDownloadRequest().getSource().equals(downloadUrl)) { return downloader; } } return null; } private BroadcastReceiver cancelDownloadReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), ACTION_CANCEL_DOWNLOAD)) { String url = intent.getStringExtra(EXTRA_DOWNLOAD_URL); Validate.notNull(url, "ACTION_CANCEL_DOWNLOAD intent needs download url extra"); Log.d(TAG, "Cancelling download with url " + url); Downloader d = getDownloader(url); if (d != null) { d.cancel(); } else { Log.e(TAG, "Could not cancel download with url " + url); } postDownloaders(); } else if (StringUtils.equals(intent.getAction(), ACTION_CANCEL_ALL_DOWNLOADS)) { for (Downloader d : downloads) { d.cancel(); Log.d(TAG, "Cancelled all downloads"); } postDownloaders(); } queryDownloads(); } }; private void onDownloadQueued(Intent intent) { Log.d(TAG, "Received enqueue request"); DownloadRequest request = intent.getParcelableExtra(EXTRA_REQUEST); if (request == null) { throw new IllegalArgumentException( "ACTION_ENQUEUE_DOWNLOAD intent needs request extra"); } Downloader downloader = getDownloader(request); if (downloader != null) { numberOfDownloads.incrementAndGet(); // smaller rss feeds before bigger media files if(request.getFeedfileType() == Feed.FEEDFILETYPE_FEED) { downloads.add(0, downloader); } else { 
downloads.add(downloader); } downloadExecutor.submit(downloader); postDownloaders(); } queryDownloads(); } private Downloader getDownloader(DownloadRequest request) { if (URLUtil.isHttpUrl(request.getSource()) || URLUtil.isHttpsUrl(request.getSource())) { return new HttpDownloader(request); } Log.e(TAG, "Could not find appropriate downloader for " + request.getSource() ); return null; } /** * Remove download from the DownloadRequester list and from the * DownloadService list. */ private void removeDownload(final Downloader d) { handler.post(new Runnable() { @Override public void run() { Log.d(TAG, "Removing downloader: " + d.getDownloadRequest().getSource()); boolean rc = downloads.remove(d); Log.d(TAG, "Result of downloads.remove: " + rc); DownloadRequester.getInstance().removeDownload(d.getDownloadRequest()); postDownloaders(); } }); } /** * Adds a new DownloadStatus object to the list of completed downloads and * saves it in the database * * @param status the download that is going to be saved */ private void saveDownloadStatus(DownloadStatus status) { reportQueue.add(status); DBWriter.addDownloadStatus(status); } /** * Creates a notification at the end of the service lifecycle to notify the * user about the number of completed downloads. A report will only be * created if there is at least one failed download excluding images */ private void updateReport() { // check if report should be created boolean createReport = false; int successfulDownloads = 0; int failedDownloads = 0; // a download report is created if at least one download has failed // (excluding failed image downloads) for (DownloadStatus status : reportQueue) { if (status.isSuccessful()) { successfulDownloads++; } else if (!status.isCancelled()) { if (status.getFeedfileType() != FeedImage.FEEDFILETYPE_FEEDIMAGE) { createReport = true; } failedDownloads++; } } if (createReport) { Log.d(TAG, "Creating report"); // create notification object Notification notification = new NotificationCompat.Builder(this) .setTicker( getString(R.string.download_report_title)) .setContentTitle( getString(R.string.download_report_content_title)) .setContentText( String.format( getString(R.string.download_report_content), successfulDownloads, failedDownloads) ) .setSmallIcon(R.drawable.stat_notify_sync_error) .setLargeIcon( BitmapFactory.decodeResource(getResources(), R.drawable.stat_notify_sync_error) ) .setContentIntent( ClientConfig.downloadServiceCallbacks.getReportNotificationContentIntent(this) ) .setAutoCancel(true).build(); NotificationManager nm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); nm.notify(REPORT_ID, notification); } else { Log.d(TAG, "No report is created"); } reportQueue.clear(); } /** * Calls query downloads on the services main thread. This method should be used instead of queryDownloads if it is * used from a thread other than the main thread. 
*/ void queryDownloadsAsync() { handler.post(new Runnable() { public void run() { queryDownloads(); ; } }); } /** * Check if there's something else to download, otherwise stop */ void queryDownloads() { Log.d(TAG, numberOfDownloads.get() + " downloads left"); if (numberOfDownloads.get() <= 0 && DownloadRequester.getInstance().hasNoDownloads()) { Log.d(TAG, "Number of downloads is " + numberOfDownloads.get() + ", attempting shutdown"); stopSelf(); } else { setupNotificationUpdater(); startForeground(NOTIFICATION_ID, updateNotifications()); } } private void postAuthenticationNotification(final DownloadRequest downloadRequest) { handler.post(new Runnable() { @Override public void run() { final String resourceTitle = (downloadRequest.getTitle() != null) ? downloadRequest.getTitle() : downloadRequest.getSource(); NotificationCompat.Builder builder = new NotificationCompat.Builder(DownloadService.this); builder.setTicker(getText(R.string.authentication_notification_title)) .setContentTitle(getText(R.string.authentication_notification_title)) .setContentText(getText(R.string.authentication_notification_msg)) .setStyle(new NotificationCompat.BigTextStyle().bigText(getText(R.string.authentication_notification_msg) + ": " + resourceTitle)) .setSmallIcon(R.drawable.ic_stat_authentication) .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_stat_authentication)) .setAutoCancel(true) .setContentIntent(ClientConfig.downloadServiceCallbacks.getAuthentificationNotificationContentIntent(DownloadService.this, downloadRequest)); Notification n = builder.build(); NotificationManager nm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); nm.notify(downloadRequest.getSource().hashCode(), n); } }); } /** * Is called whenever a Feed is downloaded */ private void handleCompletedFeedDownload(DownloadRequest request) { Log.d(TAG, "Handling completed Feed Download"); feedSyncThread.submitCompletedDownload(request); } /** * Is called whenever a FeedMedia is downloaded. */ private void handleCompletedFeedMediaDownload(DownloadStatus status, DownloadRequest request) { Log.d(TAG, "Handling completed FeedMedia Download"); syncExecutor.execute(new MediaHandlerThread(status, request)); } private void handleFailedDownload(DownloadStatus status, DownloadRequest request) { Log.d(TAG, "Handling failed download"); syncExecutor.execute(new FailedDownloadHandler(status, request)); } /** * Takes a single Feed, parses the corresponding file and refreshes * information in the manager */ class FeedSyncThread extends Thread { private static final String TAG = "FeedSyncThread"; private BlockingQueue<DownloadRequest> completedRequests = new LinkedBlockingDeque<DownloadRequest>(); private CompletionService<Pair<DownloadRequest, FeedHandlerResult>> parserService = new ExecutorCompletionService<Pair<DownloadRequest, FeedHandlerResult>>(Executors.newSingleThreadExecutor()); private ExecutorService dbService = Executors.newSingleThreadExecutor(); private Future<?> dbUpdateFuture; private volatile boolean isActive = true; private volatile boolean isCollectingRequests = false; private final long WAIT_TIMEOUT = 3000; /** * Waits for completed requests. Once the first request has been taken, the method will wait WAIT_TIMEOUT ms longer to * collect more completed requests. * * @return Collected feeds or null if the method has been interrupted during the first waiting period. 
*/ private List<Pair<DownloadRequest, FeedHandlerResult>> collectCompletedRequests() { List<Pair<DownloadRequest, FeedHandlerResult>> results = new LinkedList<Pair<DownloadRequest, FeedHandlerResult>>(); DownloadRequester requester = DownloadRequester.getInstance(); int tasks = 0; try { DownloadRequest request = completedRequests.take(); parserService.submit(new FeedParserTask(request)); tasks++; } catch (InterruptedException e) { return null; } tasks += pollCompletedDownloads(); isCollectingRequests = true; if (requester.isDownloadingFeeds()) { // wait for completion of more downloads long startTime = System.currentTimeMillis(); long currentTime = startTime; while (requester.isDownloadingFeeds() && (currentTime - startTime) < WAIT_TIMEOUT) { try { Log.d(TAG, "Waiting for " + (startTime + WAIT_TIMEOUT - currentTime) + " ms"); sleep(startTime + WAIT_TIMEOUT - currentTime); } catch (InterruptedException e) { Log.d(TAG, "interrupted while waiting for more downloads"); tasks += pollCompletedDownloads(); } finally { currentTime = System.currentTimeMillis(); } } tasks += pollCompletedDownloads(); } isCollectingRequests = false; for (int i = 0; i < tasks; i++) { try { Pair<DownloadRequest, FeedHandlerResult> result = parserService.take().get(); if (result != null) { results.add(result); } } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } } return results; } private int pollCompletedDownloads() { int tasks = 0; for (int i = 0; i < completedRequests.size(); i++) { parserService.submit(new FeedParserTask(completedRequests.poll())); tasks++; } return tasks; } @Override public void run() { while (isActive) { final List<Pair<DownloadRequest, FeedHandlerResult>> results = collectCompletedRequests(); if (results == null) { continue; } Log.d(TAG, "Bundling " + results.size() + " feeds"); for (Pair<DownloadRequest, FeedHandlerResult> result : results) { removeDuplicateImages(result.second.feed); // duplicate images have to removed because the DownloadRequester does not accept two downloads with the same download URL yet. 
} // Save information of feed in DB if (dbUpdateFuture != null) { try { dbUpdateFuture.get(); } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } } dbUpdateFuture = dbService.submit(new Runnable() { @Override public void run() { Feed[] savedFeeds = DBTasks.updateFeed(DownloadService.this, getFeeds(results)); for (int i = 0; i < savedFeeds.length; i++) { Feed savedFeed = savedFeeds[i]; // If loadAllPages=true, check if another page is available and queue it for download final boolean loadAllPages = results.get(i).first.getArguments().getBoolean(DownloadRequester.REQUEST_ARG_LOAD_ALL_PAGES); final Feed feed = results.get(i).second.feed; if (loadAllPages && feed.getNextPageLink() != null) { try { feed.setId(savedFeed.getId()); DBTasks.loadNextPageOfFeed(DownloadService.this, savedFeed, true); } catch (DownloadRequestException e) { Log.e(TAG, "Error trying to load next page", e); } } ClientConfig.downloadServiceCallbacks.onFeedParsed(DownloadService.this, savedFeed); numberOfDownloads.decrementAndGet(); } queryDownloadsAsync(); } }); } if (dbUpdateFuture != null) { try { dbUpdateFuture.get(); } catch (InterruptedException e) { } catch (ExecutionException e) { e.printStackTrace(); } } Log.d(TAG, "Shutting down"); } /** * Helper method */ private Feed[] getFeeds(List<Pair<DownloadRequest, FeedHandlerResult>> results) { Feed[] feeds = new Feed[results.size()]; for (int i = 0; i < results.size(); i++) { feeds[i] = results.get(i).second.feed; } return feeds; } private class FeedParserTask implements Callable<Pair<DownloadRequest, FeedHandlerResult>> { private DownloadRequest request; private FeedParserTask(DownloadRequest request) { this.request = request; } @Override public Pair<DownloadRequest, FeedHandlerResult> call() throws Exception { return parseFeed(request); } } private Pair<DownloadRequest, FeedHandlerResult> parseFeed(DownloadRequest request) { Feed feed = new Feed(request.getSource(), new Date()); feed.setFile_url(request.getDestination()); feed.setId(request.getFeedfileId()); feed.setDownloaded(true); feed.setPreferences(new FeedPreferences(0, true, FeedPreferences.AutoDeleteAction.GLOBAL, request.getUsername(), request.getPassword())); feed.setPageNr(request.getArguments().getInt(DownloadRequester.REQUEST_ARG_PAGE_NR, 0)); DownloadError reason = null; String reasonDetailed = null; boolean successful = true; FeedHandler feedHandler = new FeedHandler(); FeedHandlerResult result = null; try { result = feedHandler.parseFeed(feed); Log.d(TAG, feed.getTitle() + " parsed"); if (checkFeedData(feed) == false) { throw new InvalidFeedException(); } } catch (SAXException e) { successful = false; e.printStackTrace(); reason = DownloadError.ERROR_PARSER_EXCEPTION; reasonDetailed = e.getMessage(); } catch (IOException e) { successful = false; e.printStackTrace(); reason = DownloadError.ERROR_PARSER_EXCEPTION; reasonDetailed = e.getMessage(); } catch (ParserConfigurationException e) { successful = false; e.printStackTrace(); reason = DownloadError.ERROR_PARSER_EXCEPTION; reasonDetailed = e.getMessage(); } catch (UnsupportedFeedtypeException e) { e.printStackTrace(); successful = false; reason = DownloadError.ERROR_UNSUPPORTED_TYPE; reasonDetailed = e.getMessage(); } catch (InvalidFeedException e) { e.printStackTrace(); successful = false; reason = DownloadError.ERROR_PARSER_EXCEPTION; reasonDetailed = e.getMessage(); } // cleanup(); if (successful) { // we create a 'successful' download log if the feed's last refresh failed 
List<DownloadStatus> log = DBReader.getFeedDownloadLog(feed); if(log.size() > 0 && log.get(0).isSuccessful() == false) { saveDownloadStatus(new DownloadStatus(feed, feed.getHumanReadableIdentifier(), DownloadError.SUCCESS, successful, reasonDetailed)); } return Pair.create(request, result); } else { numberOfDownloads.decrementAndGet(); saveDownloadStatus(new DownloadStatus(feed, feed.getHumanReadableIdentifier(), reason, successful, reasonDetailed)); return null; } } /** * Checks if the feed was parsed correctly. */ private boolean checkFeedData(Feed feed) { if (feed.getTitle() == null) { Log.e(TAG, "Feed has no title."); return false; } if (!hasValidFeedItems(feed)) { Log.e(TAG, "Feed has invalid items"); return false; } return true; } /** * Checks if the FeedItems of this feed have images that point * to the same URL. If two FeedItems have an image that points to * the same URL, the reference of the second item is removed, so that every image * reference is unique. */ private void removeDuplicateImages(Feed feed) { for (int x = 0; x < feed.getItems().size(); x++) { for (int y = x + 1; y < feed.getItems().size(); y++) { FeedItem item1 = feed.getItems().get(x); FeedItem item2 = feed.getItems().get(y); if (item1.hasItemImage() && item2.hasItemImage()) { if (StringUtils.equals(item1.getImage().getDownload_url(), item2.getImage().getDownload_url())) { item2.setImage(null); } } } } } private boolean hasValidFeedItems(Feed feed) { for (FeedItem item : feed.getItems()) { if (item.getTitle() == null) { Log.e(TAG, "Item has no title"); return false; } if (item.getPubDate() == null) { Log.e(TAG, "Item has no pubDate. Using current time as pubDate"); if (item.getTitle() != null) { Log.e(TAG, "Title of invalid item: " + item.getTitle()); } item.setPubDate(new Date()); } } return true; } /** * Delete files that aren't needed anymore */ private void cleanup(Feed feed) { if (feed.getFile_url() != null) { if (new File(feed.getFile_url()).delete()) { Log.d(TAG, "Successfully deleted cache file."); } else { Log.e(TAG, "Failed to delete cache file."); } feed.setFile_url(null); } else { Log.d(TAG, "Didn't delete cache file: File url is not set."); } } public void shutdown() { isActive = false; if (isCollectingRequests) { interrupt(); } } public void submitCompletedDownload(DownloadRequest request) { completedRequests.offer(request); if (isCollectingRequests) { interrupt(); } } } /** * Handles failed downloads. * <p/> * If the file has been partially downloaded, this handler will set the file_url of the FeedFile to the location * of the downloaded file. * <p/> * Currently, this handler only handles FeedMedia objects, because Feeds and FeedImages are deleted if the download fails. */ class FailedDownloadHandler implements Runnable { private DownloadRequest request; private DownloadStatus status; FailedDownloadHandler(DownloadStatus status, DownloadRequest request) { this.request = request; this.status = status; } @Override public void run() { if(request.getFeedfileType() == Feed.FEEDFILETYPE_FEED) { DBWriter.setFeedLastUpdateFailed(request.getFeedfileId(), true); } else if (request.isDeleteOnFailure()) { Log.d(TAG, "Ignoring failed download, deleteOnFailure=true"); } else { File dest = new File(request.getDestination()); if (dest.exists() && request.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) { Log.d(TAG, "File has been partially downloaded. 
Writing file url"); FeedMedia media = DBReader.getFeedMedia(request.getFeedfileId()); media.setFile_url(request.getDestination()); try { DBWriter.setFeedMedia(media).get(); } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } } } } } /** * Handles a completed media download. */ class MediaHandlerThread implements Runnable { private DownloadRequest request; private DownloadStatus status; public MediaHandlerThread(DownloadStatus status, DownloadRequest request) { Validate.notNull(status); Validate.notNull(request); this.status = status; this.request = request; } @Override public void run() { FeedMedia media = DBReader.getFeedMedia(request.getFeedfileId()); if (media == null) { throw new IllegalStateException( "Could not find downloaded media object in database"); } boolean chaptersRead = false; media.setDownloaded(true); media.setFile_url(request.getDestination()); media.setHasEmbeddedPicture(null); // Get duration MediaMetadataRetriever mmr = null; try { mmr = new MediaMetadataRetriever(); mmr.setDataSource(media.getFile_url()); String durationStr = mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION); media.setDuration(Integer.parseInt(durationStr)); Log.d(TAG, "Duration of file is " + media.getDuration()); } catch (NumberFormatException e) { e.printStackTrace(); } catch (RuntimeException e) { e.printStackTrace(); } finally { if (mmr != null) { mmr.release(); } } if (media.getItem().getChapters() == null) { ChapterUtils.loadChaptersFromFileUrl(media); if (media.getItem().getChapters() != null) { chaptersRead = true; } } try { // we've received the media, we don't want to autodownload it again FeedItem item = media.getItem(); item.setAutoDownload(false); // update the db DBWriter.setFeedItem(item).get(); DBWriter.setFeedMedia(media).get(); if (!DBTasks.isInQueue(DownloadService.this, item.getId())) { DBWriter.addQueueItem(DownloadService.this, item).get(); } } catch (ExecutionException e) { e.printStackTrace(); status = new DownloadStatus(media, media.getEpisodeTitle(), DownloadError.ERROR_DB_ACCESS_ERROR, false, e.getMessage()); } catch (InterruptedException e) { e.printStackTrace(); status = new DownloadStatus(media, media.getEpisodeTitle(), DownloadError.ERROR_DB_ACCESS_ERROR, false, e.getMessage()); } saveDownloadStatus(status); if(GpodnetPreferences.loggedIn()) { FeedItem item = media.getItem(); GpodnetEpisodeAction action = new GpodnetEpisodeAction.Builder(item, Action.DOWNLOAD) .currentDeviceId() .currentTimestamp() .build(); GpodnetPreferences.enqueueEpisodeAction(action); } numberOfDownloads.decrementAndGet(); queryDownloadsAsync(); } } /** * Schedules the notification updater task if it hasn't been scheduled yet. */ private void setupNotificationUpdater() { Log.d(TAG, "Setting up notification updater"); if (notificationUpdater == null) { notificationUpdater = new NotificationUpdater(); notificationUpdaterFuture = schedExecutor.scheduleAtFixedRate( notificationUpdater, 5L, 5L, TimeUnit.SECONDS); } } private void cancelNotificationUpdater() { boolean result = false; if (notificationUpdaterFuture != null) { result = notificationUpdaterFuture.cancel(true); } notificationUpdater = null; notificationUpdaterFuture = null; Log.d(TAG, "NotificationUpdater cancelled. 
Result: " + result);
    }

    /**
     * Periodically refreshes the ongoing download notification on the main thread.
     */
    private class NotificationUpdater implements Runnable {
        @Override
        public void run() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    Notification n = updateNotifications();
                    if (n != null) {
                        NotificationManager nm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
                        nm.notify(NOTIFICATION_ID, n);
                    }
                }
            });
        }
    }

    private long lastPost = 0;

    /**
     * Publishes the current list of downloaders as a sticky event and re-posts itself every 1.5 seconds.
     */
    final Runnable postDownloaderTask = new Runnable() {
        @Override
        public void run() {
            List<Downloader> list = Collections.unmodifiableList(downloads);
            EventBus.getDefault().postSticky(DownloadEvent.refresh(list));
            postHandler.postDelayed(postDownloaderTask, 1500);
        }
    };

    private void postDownloaders() {
        long now = System.currentTimeMillis();
        // throttle immediate updates to at most one every 250 ms; the periodic task above keeps
        // the sticky event fresh in between
        if (now - lastPost >= 250) {
            postHandler.removeCallbacks(postDownloaderTask);
            postDownloaderTask.run();
            lastPost = now;
        }
    }
}
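/*
 * Illustrative sketch, not part of the original service above: how a caller enqueues and cancels
 * downloads based on the constants and the class comment of DownloadService. The Context, the
 * DownloadRequest instance and the download URL are assumed to exist; DownloadRequest objects are
 * normally built and submitted through DownloadRequester, which is not shown here.
 */
class DownloadServiceClientSketch {

    void enqueue(Context context, DownloadRequest request) {
        // DownloadService reads the request from EXTRA_REQUEST in onStartCommand()
        Intent intent = new Intent(context, DownloadService.class);
        intent.putExtra(DownloadService.EXTRA_REQUEST, request);
        context.startService(intent);
    }

    void cancelOne(Context context, String downloadUrl) {
        // handled by cancelDownloadReceiver, registered with this action in onCreate()
        Intent intent = new Intent(DownloadService.ACTION_CANCEL_DOWNLOAD);
        intent.putExtra(DownloadService.EXTRA_DOWNLOAD_URL, downloadUrl);
        context.sendBroadcast(intent);
    }

    void cancelAll(Context context) {
        context.sendBroadcast(new Intent(DownloadService.ACTION_CANCEL_ALL_DOWNLOADS));
    }
}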
package org.xdi.oxauth.model.ldap; import org.gluu.site.ldap.persistence.annotation.LdapAttribute; import org.gluu.site.ldap.persistence.annotation.LdapDN; import org.gluu.site.ldap.persistence.annotation.LdapEntry; import org.gluu.site.ldap.persistence.annotation.LdapObjectClass; import java.io.Serializable; import java.util.Date; /** * @author Yuriy Zabrovarnyy * @author Javier Rojas Blum * @version September 5, 2016 */ @LdapEntry @LdapObjectClass(values = {"top", "oxAuthToken"}) public class TokenLdap implements Serializable { @LdapDN private String dn; @LdapAttribute(name = "uniqueIdentifier") private String id; @LdapAttribute(name = "oxAuthGrantId") private String grantId; @LdapAttribute(name = "oxAuthUserId") private String userId; @LdapAttribute(name = "oxAuthClientId") private String clientId; @LdapAttribute(name = "oxAuthCreation") private Date creationDate; @LdapAttribute(name = "oxAuthExpiration") private Date expirationDate; @LdapAttribute(name = "oxAuthAuthenticationTime") private Date authenticationTime; @LdapAttribute(name = "oxAuthScope") private String scope; @LdapAttribute(name = "oxAuthTokenCode") private String tokenCode; @LdapAttribute(name = "oxAuthTokenType") private String tokenType; @LdapAttribute(name = "oxAuthGrantType") private String grantType; @LdapAttribute(name = "oxAuthJwtRequest") private String jwtRequest; @LdapAttribute(name = "oxAuthAuthorizationCode") private String authorizationCode; @LdapAttribute(name = "oxAuthNonce") private String nonce; @LdapAttribute(name = "oxCodeChallenge") private String codeChallenge; @LdapAttribute(name = "oxCodeChallengeMethod") private String codeChallengeMethod; @LdapAttribute(name = "oxAuthenticationMode") private String authMode; @LdapAttribute(name = "oxAuthSessionDn") private String sessionDn; private boolean isFromCache; public TokenLdap() { } public String getId() { return id; } public void setId(String p_id) { id = p_id; } public String getAuthorizationCode() { return authorizationCode; } public void setAuthorizationCode(String p_authorizationCode) { authorizationCode = p_authorizationCode; } public String getNonce() { return nonce; } public void setNonce(String nonce) { this.nonce = nonce; } public String getGrantId() { return grantId; } public void setGrantId(String p_grantId) { grantId = p_grantId; } public Date getAuthenticationTime() { return authenticationTime; } public void setAuthenticationTime(Date p_authenticationTime) { authenticationTime = p_authenticationTime; } public Date getCreationDate() { return creationDate; } public void setCreationDate(Date p_creationDate) { creationDate = p_creationDate; } public String getDn() { return dn; } public void setDn(String p_dn) { dn = p_dn; } public Date getExpirationDate() { return expirationDate; } public void setExpirationDate(Date p_expirationDate) { expirationDate = p_expirationDate; } public String getGrantType() { return grantType; } public void setGrantType(String p_grantType) { grantType = p_grantType; } public String getScope() { return scope; } public void setScope(String p_scope) { scope = p_scope; } public String getTokenCode() { return tokenCode; } public void setTokenCode(String p_tokenCode) { tokenCode = p_tokenCode; } public String getTokenType() { return tokenType; } public void setTokenType(String p_tokenType) { tokenType = p_tokenType; } public TokenType getTokenTypeEnum() { return TokenType.fromValue(tokenType); } public void setTokenTypeEnum(TokenType p_tokenType) { if (p_tokenType != null) { tokenType = p_tokenType.getValue(); } } public 
String getUserId() { return userId; } public void setUserId(String p_userId) { userId = p_userId; } public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } public String getJwtRequest() { return jwtRequest; } public void setJwtRequest(String p_jwtRequest) { jwtRequest = p_jwtRequest; } public String getAuthMode() { return authMode; } public void setAuthMode(String authMode) { this.authMode = authMode; } public String getCodeChallenge() { return codeChallenge; } public void setCodeChallenge(String codeChallenge) { this.codeChallenge = codeChallenge; } public String getCodeChallengeMethod() { return codeChallengeMethod; } public void setCodeChallengeMethod(String codeChallengeMethod) { this.codeChallengeMethod = codeChallengeMethod; } public String getSessionDn() { return sessionDn; } public void setSessionDn(String sessionDn) { this.sessionDn = sessionDn; } public boolean isFromCache() { return isFromCache; } public void setIsFromCache(boolean isFromCache) { this.isFromCache = isFromCache; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TokenLdap tokenLdap = (TokenLdap) o; if (tokenCode != null ? !tokenCode.equals(tokenLdap.tokenCode) : tokenLdap.tokenCode != null) return false; if (tokenType != null ? !tokenType.equals(tokenLdap.tokenType) : tokenLdap.tokenType != null) return false; return true; } @Override public int hashCode() { int result = tokenCode != null ? tokenCode.hashCode() : 0; result = 31 * result + (tokenType != null ? tokenType.hashCode() : 0); return result; } }
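/*
 * Illustrative sketch, not part of the original entry class above: populating a minimal TokenLdap
 * entry and the identity semantics defined by equals()/hashCode(), which compare only tokenCode
 * and tokenType. The helper class name and all values are made up.
 */
class TokenLdapSketch {

    static TokenLdap minimalEntry(String tokenCode, String tokenType) {
        TokenLdap token = new TokenLdap();
        token.setTokenCode(tokenCode);
        token.setTokenType(tokenType);
        token.setCreationDate(new java.util.Date());
        return token;
    }

    static boolean sameToken(TokenLdap a, TokenLdap b) {
        // equal whenever tokenCode and tokenType match, even if userId, clientId or the dates differ
        return a.equals(b);
    }
}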
package org.jaudiotagger.tag.id3.framebody; import org.jaudiotagger.logging.ErrorMessage; import org.jaudiotagger.tag.InvalidTagException; import org.jaudiotagger.tag.datatype.*; import org.jaudiotagger.tag.id3.ID3TextEncodingConversion; import org.jaudiotagger.tag.id3.ID3v24Frames; import org.jaudiotagger.tag.id3.valuepair.TextEncoding; import org.jaudiotagger.tag.reference.Languages; import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; public class FrameBodyCOMM extends AbstractID3v2FrameBody implements ID3v24FrameBody, ID3v23FrameBody { /** * Creates a new FrameBodyCOMM datatype. */ public FrameBodyCOMM() { setObjectValue(DataTypes.OBJ_TEXT_ENCODING, TextEncoding.ISO_8859_1); setObjectValue(DataTypes.OBJ_LANGUAGE, Languages.DEFAULT_ID); setObjectValue(DataTypes.OBJ_DESCRIPTION, ""); setObjectValue(DataTypes.OBJ_TEXT, ""); } public FrameBodyCOMM(FrameBodyCOMM body) { super(body); } /** * Creates a new FrameBodyCOMM datatype. * * @param textEncoding * @param language * @param description * @param text */ public FrameBodyCOMM(byte textEncoding, String language, String description, String text) { setObjectValue(DataTypes.OBJ_TEXT_ENCODING, textEncoding); setObjectValue(DataTypes.OBJ_LANGUAGE, language); setObjectValue(DataTypes.OBJ_DESCRIPTION, description); setObjectValue(DataTypes.OBJ_TEXT, text); } /** * Construct a Comment frame body from the buffer * * @param byteBuffer * @param frameSize * @throws InvalidTagException if unable to create framebody from buffer */ public FrameBodyCOMM(ByteBuffer byteBuffer, int frameSize) throws InvalidTagException { super(byteBuffer, frameSize); } /** * Set the description field, which describes the type of comment * * @param description */ public void setDescription(String description) { if (description == null) { throw new IllegalArgumentException(ErrorMessage.GENERAL_INVALID_NULL_ARGUMENT.getMsg()); } setObjectValue(DataTypes.OBJ_DESCRIPTION, description); } /** * Get the description field, which describes the type of comment * * @return description field */ public String getDescription() { return (String) getObjectValue(DataTypes.OBJ_DESCRIPTION); } /** * The ID3v2 frame identifier * * @return the ID3v2 frame identifier for this frame type */ public String getIdentifier() { return ID3v24Frames.FRAME_ID_COMMENT; } /** * Sets the language the comment is written in * * @param language */ public void setLanguage(String language) { //TODO not sure if this might break existing code setObjectValue(DataTypes.OBJ_LANGUAGE, language); } /** * Get the language the comment is written in * * @return the language */ public String getLanguage() { return (String) getObjectValue(DataTypes.OBJ_LANGUAGE); } /** * @param text */ public void setText(String text) { if (text == null) { throw new IllegalArgumentException(ErrorMessage.GENERAL_INVALID_NULL_ARGUMENT.getMsg()); } setObjectValue(DataTypes.OBJ_TEXT, text); } /** * Returns the the text field which holds the comment, adjusted to ensure does not return trailing null * which is due to a iTunes bug. 
* * @return the text field */ public String getText() { TextEncodedStringSizeTerminated text = (TextEncodedStringSizeTerminated) getObject(DataTypes.OBJ_TEXT); return text.getValueAtIndex(0); } protected void setupObjectList() { objectList.add(new NumberHashMap(DataTypes.OBJ_TEXT_ENCODING, this, TextEncoding.TEXT_ENCODING_FIELD_SIZE)); objectList.add(new StringHashMap(DataTypes.OBJ_LANGUAGE, this, Languages.LANGUAGE_FIELD_SIZE)); objectList.add(new TextEncodedStringNullTerminated(DataTypes.OBJ_DESCRIPTION, this)); objectList.add(new TextEncodedStringSizeTerminated(DataTypes.OBJ_TEXT, this)); } /** * Because COMM have a text encoding we need to check the text String does * not contain characters that cannot be encoded in current encoding before * we write data. If there are we change the encoding. */ public void write(ByteArrayOutputStream tagBuffer) { //Ensure valid for type setTextEncoding(ID3TextEncodingConversion.getTextEncoding(getHeader(), getTextEncoding())); //Ensure valid for data if (((AbstractString) getObject(DataTypes.OBJ_TEXT)).canBeEncoded() == false) { this.setTextEncoding(ID3TextEncodingConversion.getUnicodeTextEncoding(getHeader())); } if (((AbstractString) getObject(DataTypes.OBJ_DESCRIPTION)).canBeEncoded() == false) { this.setTextEncoding(ID3TextEncodingConversion.getUnicodeTextEncoding(getHeader())); } super.write(tagBuffer); } }
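/*
 * Illustrative sketch, not part of the original frame body above: building a COMM frame with the
 * four-argument constructor. TextEncoding.ISO_8859_1 and Languages.DEFAULT_ID are the same
 * constants the no-argument constructor uses as defaults; the description and comment text are
 * made up, and the helper class name is hypothetical.
 */
class FrameBodyCOMMSketch {

    static FrameBodyCOMM defaultLanguageComment(String description, String text) {
        // write() upgrades the encoding to a Unicode variant automatically if either string
        // cannot be represented in ISO-8859-1 (see the write() override above)
        return new FrameBodyCOMM(TextEncoding.ISO_8859_1, Languages.DEFAULT_ID, description, text);
    }
}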
package de.danoeh.antennapod.core.service.playback; import android.annotation.SuppressLint; import android.app.Notification; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.media.AudioManager; import android.media.MediaMetadataRetriever; import android.media.MediaPlayer; import android.media.RemoteControlClient; import android.media.RemoteControlClient.MetadataEditor; import android.os.AsyncTask; import android.os.Binder; import android.os.Build; import android.os.IBinder; import android.preference.PreferenceManager; import android.support.v4.app.NotificationCompat; import android.util.Log; import android.util.Pair; import android.view.KeyEvent; import android.view.SurfaceHolder; import android.widget.Toast; import com.squareup.picasso.Picasso; import org.apache.commons.lang3.StringUtils; import java.io.IOException; import java.util.List; import de.danoeh.antennapod.core.ClientConfig; import de.danoeh.antennapod.core.R; import de.danoeh.antennapod.core.feed.Chapter; import de.danoeh.antennapod.core.feed.FeedItem; import de.danoeh.antennapod.core.feed.FeedMedia; import de.danoeh.antennapod.core.feed.MediaType; import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction; import de.danoeh.antennapod.core.gpoddernet.model.GpodnetEpisodeAction.Action; import de.danoeh.antennapod.core.preferences.GpodnetPreferences; import de.danoeh.antennapod.core.preferences.PlaybackPreferences; import de.danoeh.antennapod.core.preferences.UserPreferences; import de.danoeh.antennapod.core.receiver.MediaButtonReceiver; import de.danoeh.antennapod.core.storage.DBTasks; import de.danoeh.antennapod.core.storage.DBWriter; import de.danoeh.antennapod.core.util.QueueAccess; import de.danoeh.antennapod.core.util.flattr.FlattrUtils; import de.danoeh.antennapod.core.util.playback.Playable; /** * Controls the MediaPlayer that plays a FeedMedia-file */ public class PlaybackService extends Service { public static final String FORCE_WIDGET_UPDATE = "de.danoeh.antennapod.FORCE_WIDGET_UPDATE"; public static final String STOP_WIDGET_UPDATE = "de.danoeh.antennapod.STOP_WIDGET_UPDATE"; /** * Logging tag */ private static final String TAG = "PlaybackService"; /** * Parcelable of type Playable. */ public static final String EXTRA_PLAYABLE = "PlaybackService.PlayableExtra"; /** * True if media should be streamed. */ public static final String EXTRA_SHOULD_STREAM = "extra.de.danoeh.antennapod.core.service.shouldStream"; /** * True if playback should be started immediately after media has been * prepared. 
*/ public static final String EXTRA_START_WHEN_PREPARED = "extra.de.danoeh.antennapod.core.service.startWhenPrepared"; public static final String EXTRA_PREPARE_IMMEDIATELY = "extra.de.danoeh.antennapod.core.service.prepareImmediately"; public static final String ACTION_PLAYER_STATUS_CHANGED = "action.de.danoeh.antennapod.core.service.playerStatusChanged"; public static final String EXTRA_NEW_PLAYER_STATUS = "extra.de.danoeh.antennapod.service.playerStatusChanged.newStatus"; private static final String AVRCP_ACTION_PLAYER_STATUS_CHANGED = "com.android.music.playstatechanged"; private static final String AVRCP_ACTION_META_CHANGED = "com.android.music.metachanged"; public static final String ACTION_PLAYER_NOTIFICATION = "action.de.danoeh.antennapod.core.service.playerNotification"; public static final String EXTRA_NOTIFICATION_CODE = "extra.de.danoeh.antennapod.core.service.notificationCode"; public static final String EXTRA_NOTIFICATION_TYPE = "extra.de.danoeh.antennapod.core.service.notificationType"; /** * If the PlaybackService receives this action, it will stop playback and * try to shutdown. */ public static final String ACTION_SHUTDOWN_PLAYBACK_SERVICE = "action.de.danoeh.antennapod.core.service.actionShutdownPlaybackService"; /** * If the PlaybackService receives this action, it will end playback of the * current episode and load the next episode if there is one available. */ public static final String ACTION_SKIP_CURRENT_EPISODE = "action.de.danoeh.antennapod.core.service.skipCurrentEpisode"; /** * If the PlaybackService receives this action, it will pause playback. */ public static final String ACTION_PAUSE_PLAY_CURRENT_EPISODE = "action.de.danoeh.antennapod.core.service.pausePlayCurrentEpisode"; /** * If the PlaybackService receives this action, it will resume playback. */ public static final String ACTION_RESUME_PLAY_CURRENT_EPISODE = "action.de.danoeh.antennapod.core.service.resumePlayCurrentEpisode"; /** * Used in NOTIFICATION_TYPE_RELOAD. */ public static final int EXTRA_CODE_AUDIO = 1; public static final int EXTRA_CODE_VIDEO = 2; public static final int NOTIFICATION_TYPE_ERROR = 0; public static final int NOTIFICATION_TYPE_INFO = 1; public static final int NOTIFICATION_TYPE_BUFFER_UPDATE = 2; /** * Receivers of this intent should update their information about the curently playing media */ public static final int NOTIFICATION_TYPE_RELOAD = 3; /** * The state of the sleeptimer changed. */ public static final int NOTIFICATION_TYPE_SLEEPTIMER_UPDATE = 4; public static final int NOTIFICATION_TYPE_BUFFER_START = 5; public static final int NOTIFICATION_TYPE_BUFFER_END = 6; /** * No more episodes are going to be played. */ public static final int NOTIFICATION_TYPE_PLAYBACK_END = 7; /** * Playback speed has changed */ public static final int NOTIFICATION_TYPE_PLAYBACK_SPEED_CHANGE = 8; /** * Returned by getPositionSafe() or getDurationSafe() if the playbackService * is in an invalid state. */ public static final int INVALID_TIME = -1; /** * Is true if service is running. */ public static boolean isRunning = false; /** * Is true if service has received a valid start command. 
*/ public static boolean started = false; /** * Is true if the service was running, but paused due to headphone disconnect */ public static boolean transientPause = false; private static final int NOTIFICATION_ID = 1; private RemoteControlClient remoteControlClient; private PlaybackServiceMediaPlayer mediaPlayer; private PlaybackServiceTaskManager taskManager; private int startPosition; private static volatile MediaType currentMediaType = MediaType.UNKNOWN; private final IBinder mBinder = new LocalBinder(); public class LocalBinder extends Binder { public PlaybackService getService() { return PlaybackService.this; } } @Override public boolean onUnbind(Intent intent) { Log.d(TAG, "Received onUnbind event"); return super.onUnbind(intent); } /** * Returns an intent which starts an audio- or videoplayer, depending on the * type of media that is being played. If the playbackservice is not * running, the type of the last played media will be looked up. */ public static Intent getPlayerActivityIntent(Context context) { if (isRunning) { return ClientConfig.playbackServiceCallbacks.getPlayerActivityIntent(context, currentMediaType); } else { if (PlaybackPreferences.getCurrentEpisodeIsVideo()) { return ClientConfig.playbackServiceCallbacks.getPlayerActivityIntent(context, MediaType.VIDEO); } else { return ClientConfig.playbackServiceCallbacks.getPlayerActivityIntent(context, MediaType.AUDIO); } } } /** * Same as getPlayerActivityIntent(context), but here the type of activity * depends on the FeedMedia that is provided as an argument. */ public static Intent getPlayerActivityIntent(Context context, Playable media) { MediaType mt = media.getMediaType(); return ClientConfig.playbackServiceCallbacks.getPlayerActivityIntent(context, mt); } @SuppressLint("NewApi") @Override public void onCreate() { super.onCreate(); Log.d(TAG, "Service created."); isRunning = true; registerReceiver(headsetDisconnected, new IntentFilter( Intent.ACTION_HEADSET_PLUG)); registerReceiver(shutdownReceiver, new IntentFilter( ACTION_SHUTDOWN_PLAYBACK_SERVICE)); registerReceiver(bluetoothStateUpdated, new IntentFilter( AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED)); registerReceiver(audioBecomingNoisy, new IntentFilter( AudioManager.ACTION_AUDIO_BECOMING_NOISY)); registerReceiver(skipCurrentEpisodeReceiver, new IntentFilter( ACTION_SKIP_CURRENT_EPISODE)); registerReceiver(pausePlayCurrentEpisodeReceiver, new IntentFilter( ACTION_PAUSE_PLAY_CURRENT_EPISODE)); registerReceiver(pauseResumeCurrentEpisodeReceiver, new IntentFilter( ACTION_RESUME_PLAY_CURRENT_EPISODE)); remoteControlClient = setupRemoteControlClient(); taskManager = new PlaybackServiceTaskManager(this, taskManagerCallback); mediaPlayer = new PlaybackServiceMediaPlayer(this, mediaPlayerCallback); } @SuppressLint("NewApi") @Override public void onDestroy() { super.onDestroy(); Log.d(TAG, "Service is about to be destroyed"); isRunning = false; started = false; currentMediaType = MediaType.UNKNOWN; unregisterReceiver(headsetDisconnected); unregisterReceiver(shutdownReceiver); unregisterReceiver(bluetoothStateUpdated); unregisterReceiver(audioBecomingNoisy); unregisterReceiver(skipCurrentEpisodeReceiver); unregisterReceiver(pausePlayCurrentEpisodeReceiver); unregisterReceiver(pauseResumeCurrentEpisodeReceiver); mediaPlayer.shutdown(); taskManager.shutdown(); } @Override public IBinder onBind(Intent intent) { Log.d(TAG, "Received onBind event"); return mBinder; } @Override public int onStartCommand(Intent intent, int flags, int startId) { super.onStartCommand(intent, flags, 
startId); Log.d(TAG, "OnStartCommand called"); final int keycode = intent.getIntExtra(MediaButtonReceiver.EXTRA_KEYCODE, -1); final Playable playable = intent.getParcelableExtra(EXTRA_PLAYABLE); if (keycode == -1 && playable == null) { Log.e(TAG, "PlaybackService was started with no arguments"); stopSelf(); } if ((flags & Service.START_FLAG_REDELIVERY) != 0) { Log.d(TAG, "onStartCommand is a redelivered intent, calling stopForeground now."); stopForeground(true); } else { if (keycode != -1) { Log.d(TAG, "Received media button event"); handleKeycode(keycode); } else { started = true; boolean stream = intent.getBooleanExtra(EXTRA_SHOULD_STREAM, true); boolean startWhenPrepared = intent.getBooleanExtra(EXTRA_START_WHEN_PREPARED, false); boolean prepareImmediately = intent.getBooleanExtra(EXTRA_PREPARE_IMMEDIATELY, false); sendNotificationBroadcast(NOTIFICATION_TYPE_RELOAD, 0); mediaPlayer.playMediaObject(playable, stream, startWhenPrepared, prepareImmediately); } } return Service.START_REDELIVER_INTENT; } /** * Handles media button events */ private void handleKeycode(int keycode) { Log.d(TAG, "Handling keycode: " + keycode); final PlaybackServiceMediaPlayer.PSMPInfo info = mediaPlayer.getPSMPInfo(); final PlayerStatus status = info.playerStatus; switch (keycode) { case KeyEvent.KEYCODE_HEADSETHOOK: case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE: if (status == PlayerStatus.PLAYING) { if (UserPreferences.isPersistNotify()) { mediaPlayer.pause(false, true); } else { mediaPlayer.pause(true, true); } } else if (status == PlayerStatus.PAUSED || status == PlayerStatus.PREPARED) { mediaPlayer.resume(); } else if (status == PlayerStatus.PREPARING) { mediaPlayer.setStartWhenPrepared(!mediaPlayer.isStartWhenPrepared()); } else if (status == PlayerStatus.INITIALIZED) { mediaPlayer.setStartWhenPrepared(true); mediaPlayer.prepare(); } break; case KeyEvent.KEYCODE_MEDIA_PLAY: if (status == PlayerStatus.PAUSED || status == PlayerStatus.PREPARED) { mediaPlayer.resume(); } else if (status == PlayerStatus.INITIALIZED) { mediaPlayer.setStartWhenPrepared(true); mediaPlayer.prepare(); } break; case KeyEvent.KEYCODE_MEDIA_PAUSE: if (status == PlayerStatus.PLAYING) { if (UserPreferences.isPersistNotify()) { mediaPlayer.pause(false, true); } else { mediaPlayer.pause(true, true); } } break; case KeyEvent.KEYCODE_MEDIA_NEXT: case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD: mediaPlayer.seekDelta(UserPreferences.getFastFowardSecs() * 1000); break; case KeyEvent.KEYCODE_MEDIA_PREVIOUS: case KeyEvent.KEYCODE_MEDIA_REWIND: mediaPlayer.seekDelta(-UserPreferences.getRewindSecs() * 1000); break; case KeyEvent.KEYCODE_MEDIA_STOP: if (status == PlayerStatus.PLAYING) { mediaPlayer.pause(true, true); started = false; } stopForeground(true); // gets rid of persistent notification break; default: if (info.playable != null && info.playerStatus == PlayerStatus.PLAYING) { // only notify the user about an unknown key event if it is actually doing something String message = String.format(getResources().getString(R.string.unknown_media_key), keycode); Toast.makeText(this, message, Toast.LENGTH_SHORT).show(); } break; } } /** * Called by a mediaplayer Activity as soon as it has prepared its * mediaplayer. */ public void setVideoSurface(SurfaceHolder sh) { Log.d(TAG, "Setting display"); mediaPlayer.setVideoSurface(sh); } /** * Called when the surface holder of the mediaplayer has to be changed. 
*/ private void resetVideoSurface() { taskManager.cancelPositionSaver(); mediaPlayer.resetVideoSurface(); } public void notifyVideoSurfaceAbandoned() { stopForeground(true); mediaPlayer.resetVideoSurface(); } private final PlaybackServiceTaskManager.PSTMCallback taskManagerCallback = new PlaybackServiceTaskManager.PSTMCallback() { @Override public void positionSaverTick() { saveCurrentPosition(true, PlaybackServiceTaskManager.POSITION_SAVER_WAITING_INTERVAL); } @Override public void onSleepTimerExpired() { mediaPlayer.pause(true, true); sendNotificationBroadcast(NOTIFICATION_TYPE_SLEEPTIMER_UPDATE, 0); } @Override public void onWidgetUpdaterTick() { updateWidget(); } @Override public void onChapterLoaded(Playable media) { sendNotificationBroadcast(NOTIFICATION_TYPE_RELOAD, 0); } }; private final PlaybackServiceMediaPlayer.PSMPCallback mediaPlayerCallback = new PlaybackServiceMediaPlayer.PSMPCallback() { @Override public void statusChanged(PlaybackServiceMediaPlayer.PSMPInfo newInfo) { currentMediaType = mediaPlayer.getCurrentMediaType(); switch (newInfo.playerStatus) { case INITIALIZED: writePlaybackPreferences(); break; case PREPARED: taskManager.startChapterLoader(newInfo.playable); break; case PAUSED: taskManager.cancelPositionSaver(); saveCurrentPosition(false, 0); taskManager.cancelWidgetUpdater(); if (UserPreferences.isPersistNotify() && android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { // do not remove notification on pause based on user pref and whether android version supports expanded notifications // Change [Play] button to [Pause] setupNotification(newInfo); } else if (!UserPreferences.isPersistNotify()) { // remove notifcation on pause stopForeground(true); } writePlayerStatusPlaybackPreferences(); final Playable playable = mediaPlayer.getPSMPInfo().playable; // Gpodder: send play action if(GpodnetPreferences.loggedIn() && playable instanceof FeedMedia) { FeedMedia media = (FeedMedia) playable; FeedItem item = media.getItem(); GpodnetEpisodeAction action = new GpodnetEpisodeAction.Builder(item, Action.PLAY) .currentDeviceId() .currentTimestamp() .started(startPosition / 1000) .position(getCurrentPosition() / 1000) .total(getDuration() / 1000) .build(); GpodnetPreferences.enqueueEpisodeAction(action); } break; case STOPPED: //setCurrentlyPlayingMedia(PlaybackPreferences.NO_MEDIA_PLAYING); //stopSelf(); break; case PLAYING: Log.d(TAG, "Audiofocus successfully requested"); Log.d(TAG, "Resuming/Starting playback"); taskManager.startPositionSaver(); taskManager.startWidgetUpdater(); writePlayerStatusPlaybackPreferences(); setupNotification(newInfo); started = true; startPosition = mediaPlayer.getPosition(); break; case ERROR: writePlaybackPreferencesNoMediaPlaying(); break; } Intent statusUpdate = new Intent(ACTION_PLAYER_STATUS_CHANGED); // statusUpdate.putExtra(EXTRA_NEW_PLAYER_STATUS, newInfo.playerStatus.ordinal()); sendBroadcast(statusUpdate); updateWidget(); refreshRemoteControlClientState(newInfo); bluetoothNotifyChange(newInfo, AVRCP_ACTION_PLAYER_STATUS_CHANGED); bluetoothNotifyChange(newInfo, AVRCP_ACTION_META_CHANGED); } @Override public void shouldStop() { stopSelf(); } @Override public void playbackSpeedChanged(float s) { sendNotificationBroadcast( NOTIFICATION_TYPE_PLAYBACK_SPEED_CHANGE, 0); } @Override public void onBufferingUpdate(int percent) { sendNotificationBroadcast(NOTIFICATION_TYPE_BUFFER_UPDATE, percent); } @Override public boolean onMediaPlayerInfo(int code) { switch (code) { case MediaPlayer.MEDIA_INFO_BUFFERING_START: 
sendNotificationBroadcast(NOTIFICATION_TYPE_BUFFER_START, 0); return true; case MediaPlayer.MEDIA_INFO_BUFFERING_END: sendNotificationBroadcast(NOTIFICATION_TYPE_BUFFER_END, 0); return true; default: return false; } } @Override public boolean onMediaPlayerError(Object inObj, int what, int extra) { final String TAG = "PlaybackService.onErrorListener"; Log.w(TAG, "An error has occured: " + what + " " + extra); if (mediaPlayer.getPSMPInfo().playerStatus == PlayerStatus.PLAYING) { mediaPlayer.pause(true, false); } sendNotificationBroadcast(NOTIFICATION_TYPE_ERROR, what); writePlaybackPreferencesNoMediaPlaying(); stopSelf(); return true; } @Override public boolean endPlayback(boolean playNextEpisode) { PlaybackService.this.endPlayback(true); return true; } @Override public RemoteControlClient getRemoteControlClient() { return remoteControlClient; } }; private void endPlayback(boolean playNextEpisode) { Log.d(TAG, "Playback ended"); final Playable playable = mediaPlayer.getPSMPInfo().playable; if (playable == null) { Log.e(TAG, "Cannot end playback: media was null"); return; } taskManager.cancelPositionSaver(); boolean isInQueue = false; FeedItem nextItem = null; if (playable instanceof FeedMedia) { FeedMedia media = (FeedMedia) playable; FeedItem item = media.getItem(); DBWriter.markItemRead(PlaybackService.this, item, true, true); try { final List<FeedItem> queue = taskManager.getQueue(); isInQueue = QueueAccess.ItemListAccess(queue).contains(item.getId()); nextItem = DBTasks.getQueueSuccessorOfItem(this, item.getId(), queue); } catch (InterruptedException e) { e.printStackTrace(); // isInQueue remains false } if (isInQueue) { DBWriter.removeQueueItem(PlaybackService.this, item, true); } DBWriter.addItemToPlaybackHistory(PlaybackService.this, media); // auto-flattr if enabled if (isAutoFlattrable(media) && UserPreferences.getAutoFlattrPlayedDurationThreshold() == 1.0f) { DBTasks.flattrItemIfLoggedIn(PlaybackService.this, item); } // Delete episode if enabled if(UserPreferences.isAutoDelete()) { DBWriter.deleteFeedMediaOfItem(PlaybackService.this, media.getId()); Log.d(TAG, "Episode Deleted"); } // gpodder play action if(GpodnetPreferences.loggedIn()) { GpodnetEpisodeAction action = new GpodnetEpisodeAction.Builder(item, Action.PLAY) .currentDeviceId() .currentTimestamp() .started(startPosition / 1000) .position(getDuration() / 1000) .total(getDuration() / 1000) .build(); GpodnetPreferences.enqueueEpisodeAction(action); } } // Load next episode if previous episode was in the queue and if there // is an episode in the queue left. 
// Start playback immediately if continuous playback is enabled Playable nextMedia = null; boolean loadNextItem = ClientConfig.playbackServiceCallbacks.useQueue() && isInQueue && nextItem != null; playNextEpisode = playNextEpisode && loadNextItem && UserPreferences.isFollowQueue(); if (loadNextItem) { Log.d(TAG, "Loading next item in queue"); nextMedia = nextItem.getMedia(); } final boolean prepareImmediately; final boolean startWhenPrepared; final boolean stream; if (playNextEpisode) { Log.d(TAG, "Playback of next episode will start immediately."); prepareImmediately = startWhenPrepared = true; } else { Log.d(TAG, "No more episodes available to play"); prepareImmediately = startWhenPrepared = false; stopForeground(true); stopWidgetUpdater(); } writePlaybackPreferencesNoMediaPlaying(); if (nextMedia != null) { stream = !nextMedia.localFileAvailable(); mediaPlayer.playMediaObject(nextMedia, stream, startWhenPrepared, prepareImmediately); sendNotificationBroadcast(NOTIFICATION_TYPE_RELOAD, (nextMedia.getMediaType() == MediaType.VIDEO) ? EXTRA_CODE_VIDEO : EXTRA_CODE_AUDIO); } else { sendNotificationBroadcast(NOTIFICATION_TYPE_PLAYBACK_END, 0); mediaPlayer.stop(); //stopSelf(); } } public void setSleepTimer(long waitingTime) { Log.d(TAG, "Setting sleep timer to " + Long.toString(waitingTime) + " milliseconds"); taskManager.setSleepTimer(waitingTime); sendNotificationBroadcast(NOTIFICATION_TYPE_SLEEPTIMER_UPDATE, 0); } public void disableSleepTimer() { taskManager.disableSleepTimer(); sendNotificationBroadcast(NOTIFICATION_TYPE_SLEEPTIMER_UPDATE, 0); } private void writePlaybackPreferencesNoMediaPlaying() { SharedPreferences.Editor editor = PreferenceManager .getDefaultSharedPreferences(getApplicationContext()).edit(); editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_MEDIA, PlaybackPreferences.NO_MEDIA_PLAYING); editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID, PlaybackPreferences.NO_MEDIA_PLAYING); editor.putLong( PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID, PlaybackPreferences.NO_MEDIA_PLAYING); editor.putInt( PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS, PlaybackPreferences.PLAYER_STATUS_OTHER); editor.commit(); } private int getCurrentPlayerStatusAsInt(PlayerStatus playerStatus) { int playerStatusAsInt; switch (playerStatus) { case PLAYING: playerStatusAsInt = PlaybackPreferences.PLAYER_STATUS_PLAYING; break; case PAUSED: playerStatusAsInt = PlaybackPreferences.PLAYER_STATUS_PAUSED; break; default: playerStatusAsInt = PlaybackPreferences.PLAYER_STATUS_OTHER; } return playerStatusAsInt; } private void writePlaybackPreferences() { Log.d(TAG, "Writing playback preferences"); SharedPreferences.Editor editor = PreferenceManager .getDefaultSharedPreferences(getApplicationContext()).edit(); PlaybackServiceMediaPlayer.PSMPInfo info = mediaPlayer.getPSMPInfo(); MediaType mediaType = mediaPlayer.getCurrentMediaType(); boolean stream = mediaPlayer.isStreaming(); int playerStatus = getCurrentPlayerStatusAsInt(info.playerStatus); if (info.playable != null) { editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_MEDIA, info.playable.getPlayableType()); editor.putBoolean( PlaybackPreferences.PREF_CURRENT_EPISODE_IS_STREAM, stream); editor.putBoolean( PlaybackPreferences.PREF_CURRENT_EPISODE_IS_VIDEO, mediaType == MediaType.VIDEO); if (info.playable instanceof FeedMedia) { FeedMedia fMedia = (FeedMedia) info.playable; editor.putLong( PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID, fMedia.getItem().getFeed().getId()); editor.putLong( 
PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID, fMedia.getId()); } else { editor.putLong( PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID, PlaybackPreferences.NO_MEDIA_PLAYING); editor.putLong( PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID, PlaybackPreferences.NO_MEDIA_PLAYING); } info.playable.writeToPreferences(editor); } else { editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_MEDIA, PlaybackPreferences.NO_MEDIA_PLAYING); editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID, PlaybackPreferences.NO_MEDIA_PLAYING); editor.putLong( PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID, PlaybackPreferences.NO_MEDIA_PLAYING); } editor.putInt( PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS, playerStatus); editor.commit(); } private void writePlayerStatusPlaybackPreferences() { Log.d(TAG, "Writing player status playback preferences"); SharedPreferences.Editor editor = PreferenceManager .getDefaultSharedPreferences(getApplicationContext()).edit(); PlaybackServiceMediaPlayer.PSMPInfo info = mediaPlayer.getPSMPInfo(); int playerStatus = getCurrentPlayerStatusAsInt(info.playerStatus); editor.putInt( PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS, playerStatus); editor.commit(); } /** * Send ACTION_PLAYER_STATUS_CHANGED without changing the status attribute. */ private void postStatusUpdateIntent() { sendBroadcast(new Intent(ACTION_PLAYER_STATUS_CHANGED)); } private void sendNotificationBroadcast(int type, int code) { Intent intent = new Intent(ACTION_PLAYER_NOTIFICATION); intent.putExtra(EXTRA_NOTIFICATION_TYPE, type); intent.putExtra(EXTRA_NOTIFICATION_CODE, code); sendBroadcast(intent); } /** * Used by setupNotification to load notification data in another thread. */ private AsyncTask<Void, Void, Void> notificationSetupTask; /** * Prepares notification and starts the service in the foreground. */ @SuppressLint("NewApi") private void setupNotification(final PlaybackServiceMediaPlayer.PSMPInfo info) { final PendingIntent pIntent = PendingIntent.getActivity(this, 0, PlaybackService.getPlayerActivityIntent(this), PendingIntent.FLAG_UPDATE_CURRENT); if (notificationSetupTask != null) { notificationSetupTask.cancel(true); } notificationSetupTask = new AsyncTask<Void, Void, Void>() { Bitmap icon = null; @Override protected Void doInBackground(Void... 
params) { Log.d(TAG, "Starting background work"); if (android.os.Build.VERSION.SDK_INT >= 11) { if (info.playable != null) { try { int iconSize = getResources().getDimensionPixelSize( android.R.dimen.notification_large_icon_width); icon = Picasso.with(PlaybackService.this) .load(info.playable.getImageUri()) .resize(iconSize, iconSize) .get(); } catch (IOException e) { e.printStackTrace(); } } } if (icon == null) { icon = BitmapFactory.decodeResource(getApplicationContext().getResources(), ClientConfig.playbackServiceCallbacks.getNotificationIconResource(getApplicationContext())); } return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); if (mediaPlayer == null) { return; } PlaybackServiceMediaPlayer.PSMPInfo newInfo = mediaPlayer.getPSMPInfo(); final int smallIcon = ClientConfig.playbackServiceCallbacks.getNotificationIconResource(getApplicationContext()); if (!isCancelled() && started && info.playable != null) { String contentText = info.playable.getFeedTitle(); String contentTitle = info.playable.getEpisodeTitle(); Notification notification = null; if (android.os.Build.VERSION.SDK_INT >= 16) { Intent pauseButtonIntent = new Intent( // pause button intent PlaybackService.this, PlaybackService.class); pauseButtonIntent.putExtra( MediaButtonReceiver.EXTRA_KEYCODE, KeyEvent.KEYCODE_MEDIA_PAUSE); PendingIntent pauseButtonPendingIntent = PendingIntent .getService(PlaybackService.this, 0, pauseButtonIntent, PendingIntent.FLAG_UPDATE_CURRENT); Intent playButtonIntent = new Intent( // play button intent PlaybackService.this, PlaybackService.class); playButtonIntent.putExtra( MediaButtonReceiver.EXTRA_KEYCODE, KeyEvent.KEYCODE_MEDIA_PLAY); PendingIntent playButtonPendingIntent = PendingIntent .getService(PlaybackService.this, 1, playButtonIntent, PendingIntent.FLAG_UPDATE_CURRENT); Intent stopButtonIntent = new Intent( // stop button intent PlaybackService.this, PlaybackService.class); stopButtonIntent.putExtra( MediaButtonReceiver.EXTRA_KEYCODE, KeyEvent.KEYCODE_MEDIA_STOP); PendingIntent stopButtonPendingIntent = PendingIntent .getService(PlaybackService.this, 2, stopButtonIntent, PendingIntent.FLAG_UPDATE_CURRENT); Notification.Builder notificationBuilder = new Notification.Builder( PlaybackService.this) .setContentTitle(contentTitle) .setContentText(contentText) .setOngoing(true) .setContentIntent(pIntent) .setLargeIcon(icon) .setSmallIcon(smallIcon) .setPriority(UserPreferences.getNotifyPriority()); // set notification priority if (newInfo.playerStatus == PlayerStatus.PLAYING) { notificationBuilder.addAction(android.R.drawable.ic_media_pause, //pause action getString(R.string.pause_label), pauseButtonPendingIntent); } else { notificationBuilder.addAction(android.R.drawable.ic_media_play, //play action getString(R.string.play_label), playButtonPendingIntent); } if (UserPreferences.isPersistNotify()) { notificationBuilder.addAction(android.R.drawable.ic_menu_close_clear_cancel, // stop action getString(R.string.stop_label), stopButtonPendingIntent); } if (Build.VERSION.SDK_INT >= 21) { notificationBuilder.setStyle(new Notification.MediaStyle() .setMediaSession((android.media.session.MediaSession.Token) mediaPlayer.getSessionToken().getToken()) .setShowActionsInCompactView(0)) .setVisibility(Notification.VISIBILITY_PUBLIC) .setColor(Notification.COLOR_DEFAULT); } notification = notificationBuilder.build(); } else { NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder( PlaybackService.this) .setContentTitle(contentTitle) 
.setContentText(contentText).setOngoing(true) .setContentIntent(pIntent).setLargeIcon(icon) .setSmallIcon(smallIcon); notification = notificationBuilder.build(); } startForeground(NOTIFICATION_ID, notification); Log.d(TAG, "Notification set up"); } } }; if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.GINGERBREAD_MR1) { notificationSetupTask .executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR); } else { notificationSetupTask.execute(); } } /** * Saves the current position of the media file to the DB * * @param updatePlayedDuration true if played_duration should be updated. This applies only to FeedMedia objects * @param deltaPlayedDuration value by which played_duration should be increased. */ private synchronized void saveCurrentPosition(boolean updatePlayedDuration, int deltaPlayedDuration) { int position = getCurrentPosition(); int duration = getDuration(); float playbackSpeed = getCurrentPlaybackSpeed(); final Playable playable = mediaPlayer.getPSMPInfo().playable; if (position != INVALID_TIME && duration != INVALID_TIME && playable != null) { Log.d(TAG, "Saving current position to " + position); if (updatePlayedDuration && playable instanceof FeedMedia) { FeedMedia media = (FeedMedia) playable; FeedItem item = media.getItem(); media.setPlayedDuration(media.getPlayedDuration() + ((int) (deltaPlayedDuration * playbackSpeed))); // Auto flattr if (isAutoFlattrable(media) && (media.getPlayedDuration() > UserPreferences.getAutoFlattrPlayedDurationThreshold() * duration)) { Log.d(TAG, "saveCurrentPosition: performing auto flattr since played duration " + Integer.toString(media.getPlayedDuration()) + " is " + UserPreferences.getAutoFlattrPlayedDurationThreshold() * 100 + "% of file duration " + Integer.toString(duration)); DBTasks.flattrItemIfLoggedIn(this, item); } } playable.saveCurrentPosition(PreferenceManager .getDefaultSharedPreferences(getApplicationContext()), position ); } } private void stopWidgetUpdater() { taskManager.cancelWidgetUpdater(); sendBroadcast(new Intent(STOP_WIDGET_UPDATE)); } private void updateWidget() { PlaybackService.this.sendBroadcast(new Intent( FORCE_WIDGET_UPDATE)); } public boolean sleepTimerActive() { return taskManager.isSleepTimerActive(); } public long getSleepTimerTimeLeft() { return taskManager.getSleepTimerTimeLeft(); } @SuppressLint("NewApi") private RemoteControlClient setupRemoteControlClient() { if (Build.VERSION.SDK_INT < 14) { return null; } Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON); mediaButtonIntent.setComponent(new ComponentName(getPackageName(), MediaButtonReceiver.class.getName())); PendingIntent mediaPendingIntent = PendingIntent.getBroadcast( getApplicationContext(), 0, mediaButtonIntent, 0); remoteControlClient = new RemoteControlClient(mediaPendingIntent); int controlFlags; if (android.os.Build.VERSION.SDK_INT < 16) { controlFlags = RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE | RemoteControlClient.FLAG_KEY_MEDIA_NEXT; } else { controlFlags = RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE; } remoteControlClient.setTransportControlFlags(controlFlags); return remoteControlClient; } /** * Refresh player status and metadata. 
*/ @SuppressLint("NewApi") private void refreshRemoteControlClientState(PlaybackServiceMediaPlayer.PSMPInfo info) { if (android.os.Build.VERSION.SDK_INT >= 14) { if (remoteControlClient != null) { switch (info.playerStatus) { case PLAYING: remoteControlClient .setPlaybackState(RemoteControlClient.PLAYSTATE_PLAYING); break; case PAUSED: case INITIALIZED: remoteControlClient .setPlaybackState(RemoteControlClient.PLAYSTATE_PAUSED); break; case STOPPED: remoteControlClient .setPlaybackState(RemoteControlClient.PLAYSTATE_STOPPED); break; case ERROR: remoteControlClient .setPlaybackState(RemoteControlClient.PLAYSTATE_ERROR); break; default: remoteControlClient .setPlaybackState(RemoteControlClient.PLAYSTATE_BUFFERING); } if (info.playable != null) { MetadataEditor editor = remoteControlClient .editMetadata(false); editor.putString(MediaMetadataRetriever.METADATA_KEY_TITLE, info.playable.getEpisodeTitle()); editor.putString(MediaMetadataRetriever.METADATA_KEY_ALBUM, info.playable.getFeedTitle()); editor.apply(); } Log.d(TAG, "RemoteControlClient state was refreshed"); } } } private void bluetoothNotifyChange(PlaybackServiceMediaPlayer.PSMPInfo info, String whatChanged) { boolean isPlaying = false; if (info.playerStatus == PlayerStatus.PLAYING) { isPlaying = true; } if (info.playable != null) { Intent i = new Intent(whatChanged); i.putExtra("id", 1); i.putExtra("artist", ""); i.putExtra("album", info.playable.getFeedTitle()); i.putExtra("track", info.playable.getEpisodeTitle()); i.putExtra("playing", isPlaying); final List<FeedItem> queue = taskManager.getQueueIfLoaded(); if (queue != null) { i.putExtra("ListSize", queue.size()); } i.putExtra("duration", info.playable.getDuration()); i.putExtra("position", info.playable.getPosition()); sendBroadcast(i); } } /** * Pauses playback when the headset is disconnected and the preference is * set */ private BroadcastReceiver headsetDisconnected = new BroadcastReceiver() { private static final String TAG = "headsetDisconnected"; private static final int UNPLUGGED = 0; private static final int PLUGGED = 1; @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), Intent.ACTION_HEADSET_PLUG)) { int state = intent.getIntExtra("state", -1); if (state != -1) { Log.d(TAG, "Headset plug event. State is " + state); if (state == UNPLUGGED) { Log.d(TAG, "Headset was unplugged during playback."); pauseIfPauseOnDisconnect(); } else if (state == PLUGGED) { Log.d(TAG, "Headset was plugged in during playback."); unpauseIfPauseOnDisconnect(); } } else { Log.e(TAG, "Received invalid ACTION_HEADSET_PLUG intent"); } } } }; private BroadcastReceiver bluetoothStateUpdated = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED)) { int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1); int prevState = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_PREVIOUS_STATE, -1); if (state == AudioManager.SCO_AUDIO_STATE_CONNECTED) { Log.d(TAG, "Received bluetooth connection intent"); unpauseIfPauseOnDisconnect(); } } } }; private BroadcastReceiver audioBecomingNoisy = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { // sound is about to change, eg. 
bluetooth -> speaker Log.d(TAG, "Pausing playback because audio is becoming noisy"); pauseIfPauseOnDisconnect(); } // android.media.AUDIO_BECOMING_NOISY }; /** * Pauses playback if PREF_PAUSE_ON_HEADSET_DISCONNECT was set to true. */ private void pauseIfPauseOnDisconnect() { if (UserPreferences.isPauseOnHeadsetDisconnect()) { if (mediaPlayer.getPlayerStatus() == PlayerStatus.PLAYING) { transientPause = true; } if (UserPreferences.isPersistNotify()) { mediaPlayer.pause(false, true); } else { mediaPlayer.pause(true, true); } } } private void unpauseIfPauseOnDisconnect() { if (transientPause) { transientPause = false; if (UserPreferences.isPauseOnHeadsetDisconnect() && UserPreferences.isUnpauseOnHeadsetReconnect()) { mediaPlayer.resume(); } } } private BroadcastReceiver shutdownReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), ACTION_SHUTDOWN_PLAYBACK_SERVICE)) { stopSelf(); } } }; private BroadcastReceiver skipCurrentEpisodeReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), ACTION_SKIP_CURRENT_EPISODE)) { Log.d(TAG, "Received SKIP_CURRENT_EPISODE intent"); mediaPlayer.endPlayback(); } } }; private BroadcastReceiver pauseResumeCurrentEpisodeReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), ACTION_RESUME_PLAY_CURRENT_EPISODE)) { Log.d(TAG, "Received RESUME_PLAY_CURRENT_EPISODE intent"); mediaPlayer.resume(); } } }; private BroadcastReceiver pausePlayCurrentEpisodeReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (StringUtils.equals(intent.getAction(), ACTION_PAUSE_PLAY_CURRENT_EPISODE)) { Log.d(TAG, "Received PAUSE_PLAY_CURRENT_EPISODE intent"); mediaPlayer.pause(false, false); } } }; public static MediaType getCurrentMediaType() { return currentMediaType; } public void resume() { mediaPlayer.resume(); } public void prepare() { mediaPlayer.prepare(); } public void pause(boolean abandonAudioFocus, boolean reinit) { mediaPlayer.pause(abandonAudioFocus, reinit); } public void reinit() { mediaPlayer.reinit(); } public PlaybackServiceMediaPlayer.PSMPInfo getPSMPInfo() { return mediaPlayer.getPSMPInfo(); } public PlayerStatus getStatus() { return mediaPlayer.getPSMPInfo().playerStatus; } public Playable getPlayable() { return mediaPlayer.getPSMPInfo().playable; } public void setSpeed(float speed) { mediaPlayer.setSpeed(speed); } public boolean canSetSpeed() { return mediaPlayer.canSetSpeed(); } public float getCurrentPlaybackSpeed() { return mediaPlayer.getPlaybackSpeed(); } public boolean isStartWhenPrepared() { return mediaPlayer.isStartWhenPrepared(); } public void setStartWhenPrepared(boolean s) { mediaPlayer.setStartWhenPrepared(s); } public void seekTo(final int t) { if(mediaPlayer.getPlayerStatus() == PlayerStatus.PLAYING && GpodnetPreferences.loggedIn()) { final Playable playable = mediaPlayer.getPSMPInfo().playable; if (playable instanceof FeedMedia) { FeedMedia media = (FeedMedia) playable; FeedItem item = media.getItem(); GpodnetEpisodeAction action = new GpodnetEpisodeAction.Builder(item, Action.PLAY) .currentDeviceId() .currentTimestamp() .started(startPosition / 1000) .position(getCurrentPosition() / 1000) .total(getDuration() / 1000) .build(); GpodnetPreferences.enqueueEpisodeAction(action); } } mediaPlayer.seekTo(t); 
if(mediaPlayer.getPlayerStatus() == PlayerStatus.PLAYING ) { startPosition = t; } } public void seekDelta(final int d) { mediaPlayer.seekDelta(d); } /** * @see de.danoeh.antennapod.core.service.playback.PlaybackServiceMediaPlayer#seekToChapter(de.danoeh.antennapod.core.feed.Chapter) */ public void seekToChapter(Chapter c) { mediaPlayer.seekToChapter(c); } /** * call getDuration() on mediaplayer or return INVALID_TIME if player is in * an invalid state. */ public int getDuration() { return mediaPlayer.getDuration(); } /** * call getCurrentPosition() on mediaplayer or return INVALID_TIME if player * is in an invalid state. */ public int getCurrentPosition() { return mediaPlayer.getPosition(); } public boolean isStreaming() { return mediaPlayer.isStreaming(); } public Pair<Integer, Integer> getVideoSize() { return mediaPlayer.getVideoSize(); } private boolean isAutoFlattrable(FeedMedia media) { if (media != null) { FeedItem item = media.getItem(); return item != null && FlattrUtils.hasToken() && UserPreferences.isAutoFlattr() && item.getPaymentLink() != null && item.getFlattrStatus().getUnflattred(); } else { return false; } } }
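/*
 * Illustrative sketch, not part of AntennaPod: shows how a caller might start the
 * PlaybackService above using the intent extras it declares (EXTRA_PLAYABLE,
 * EXTRA_SHOULD_STREAM, EXTRA_START_WHEN_PREPARED, EXTRA_PREPARE_IMMEDIATELY). The extra
 * names and their meaning come from the service; this helper class is hypothetical, and
 * it assumes Playable is a Parcelable as documented on EXTRA_PLAYABLE above.
 */
package de.danoeh.antennapod.core.service.playback;

import android.content.Context;
import android.content.Intent;
import android.os.Parcelable;

import de.danoeh.antennapod.core.util.playback.Playable;

public class PlaybackServiceStarterExample {

    /** Starts playback of the given media and begins playing as soon as it is prepared. */
    public static void startPlayback(Context context, Playable media, boolean stream) {
        Intent intent = new Intent(context, PlaybackService.class);
        // EXTRA_PLAYABLE is read back via getParcelableExtra() in onStartCommand().
        intent.putExtra(PlaybackService.EXTRA_PLAYABLE, (Parcelable) media);
        intent.putExtra(PlaybackService.EXTRA_SHOULD_STREAM, stream);
        intent.putExtra(PlaybackService.EXTRA_START_WHEN_PREPARED, true);
        intent.putExtra(PlaybackService.EXTRA_PREPARE_IMMEDIATELY, true);
        context.startService(intent);
    }
}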
package com.mitchellbosecke.pebble.utils; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Field; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; import com.mitchellbosecke.pebble.error.AttributeNotFoundException; import com.mitchellbosecke.pebble.error.PebbleException; public class ReflectionUtils { public static Object getAttribute(Context context, Object object, String attributeName, Object[] args) throws PebbleException { if (object == null) { throw new NullPointerException(String.format("Can not get attribute [%s] of null object.", attributeName)); } // hold onto original name for error reporting String originalAttributeName = attributeName; Class<?> clazz = object.getClass(); Object result = null; // first we check maps, as they are a bit of an exception if (args.length == 0 && object instanceof Map && ((Map<?, ?>) object).containsKey(attributeName)) { return ((Map<?, ?>) object).get(attributeName); } Member member = null; if (attributeName != null) { member = findMember(context, object, attributeName, args); } if ((attributeName == null || member == null) && context.isStrictVariables()) { throw new AttributeNotFoundException( String.format( "Attribute [%s] of [%s] does not exist or can not be accessed and strict variables is set to true.", originalAttributeName, clazz)); } try { if (member instanceof Method) { result = ((Method) member).invoke(object, args); } else if (member instanceof Field) { result = ((Field) member).get(object); } } catch (Exception e) { throw new RuntimeException(e); } return result; } private static Member findMember(Context context, Object object, String attributeName, Object[] args) { Class<?> clazz = object.getClass(); Member member = null; // check if it's cached Map<String, Member> memberCache = context.getAttributeCache().get(clazz); if (memberCache != null) { if(memberCache.containsKey(attributeName)){ // quick return return memberCache.get(attributeName); } } else { memberCache = new HashMap<>(); context.getAttributeCache().put(clazz, memberCache); } // capitalize first letter of attribute for the following attempts String attributeCapitalized = Character.toUpperCase(attributeName.charAt(0)) + attributeName.substring(1); // check get method if (member == null) { try { member = clazz.getMethod("get" + attributeCapitalized); } catch (NoSuchMethodException | SecurityException e) { } } // check is method if (member == null) { try { member = clazz.getMethod("is" + attributeCapitalized); } catch (NoSuchMethodException | SecurityException e) { } } // check has method if (member == null) { try { member = clazz.getMethod("has" + attributeCapitalized); } catch (NoSuchMethodException | SecurityException e) { } } // check if attribute is a public method if (member == null) { try { Class<?>[] argClasses = new Class<?>[args.length]; for (int i = 0; i < args.length; i++) { argClasses[i] = args[i].getClass(); } member = clazz.getMethod(attributeName, argClasses); } catch (NoSuchMethodException | SecurityException e) { } } // public field if (member == null && args.length == 0) { try { member = clazz.getField(attributeName); } catch (NoSuchFieldException | SecurityException e) { } } if(member != null){ ((AccessibleObject) member).setAccessible(true); } memberCache.put(attributeName, member); return member; } }
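/*
 * Illustrative sketch, not part of Pebble: a stripped-down, stand-alone version of the
 * attribute lookup order implemented by ReflectionUtils.findMember above -- Map key first,
 * then getX(), isX(), hasX(), a method named after the attribute, and finally a public
 * field. The Pebble Context, member caching and strict-variables handling are omitted,
 * and the class and method names here are hypothetical.
 */
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Map;

public class AttributeLookupExample {

    public static Object lookup(Object object, String name) throws Exception {
        // Maps are a special case: the attribute name is used directly as a key.
        if (object instanceof Map && ((Map<?, ?>) object).containsKey(name)) {
            return ((Map<?, ?>) object).get(name);
        }
        Class<?> clazz = object.getClass();
        String capitalized = Character.toUpperCase(name.charAt(0)) + name.substring(1);
        // Try the accessor prefixes in the same order as ReflectionUtils, then the raw name.
        for (String candidate : new String[]{"get" + capitalized, "is" + capitalized, "has" + capitalized, name}) {
            try {
                Method method = clazz.getMethod(candidate);
                return method.invoke(object);
            } catch (NoSuchMethodException ignored) {
                // fall through to the next candidate
            }
        }
        // Last resort: a public field with the attribute's name (throws if nothing matched).
        Field field = clazz.getField(name);
        return field.get(object);
    }

    public static void main(String[] args) throws Exception {
        // Resolves String#getBytes() via the "get" prefix and prints the byte[] class.
        System.out.println(lookup("hello", "bytes").getClass());
    }
}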
package org.jitsi.videobridge.rtcp; import net.sf.fmj.media.rtp.*; import org.jitsi.impl.neomedia.rtp.translator.*; import org.jitsi.service.neomedia.*; import org.jitsi.service.neomedia.recording.*; import org.jitsi.videobridge.*; import java.util.*; /** * @author George Politis */ public class BridgeSenderReporting { public BridgeSenderReporting(BridgeRTCPTerminationStrategy strategy) { this.strategy = strategy; } public final BridgeRTCPTerminationStrategy strategy; private final Map<Integer, Map<Integer, SenderInformation>> lastSenderInformationMap = new HashMap<Integer, Map<Integer, SenderInformation>>(); /** * Explode the SRs to make them compliant with features from the translator. * * @param outPacket * @return */ public boolean explodeSenderReport(RTCPCompoundPacket outPacket) { if (outPacket.packets == null || outPacket.packets.length == 0 || outPacket.packets[0].type != RTCPPacket.SR) { return false; } RTCPSRPacket senderReport = (RTCPSRPacket) outPacket.packets[0]; Conference conf = strategy.getConference(); if (senderReport == null || conf == null) return false; RTPTranslator rtpTranslator = strategy.getRTPTranslator(); if (rtpTranslator == null || !(rtpTranslator instanceof RTPTranslatorImpl)) return false; RTPTranslatorImpl rtpTranslatorImpl = (RTPTranslatorImpl)rtpTranslator; long ssrc = senderReport.ssrc & 0xFFFFFFFFL; if (ssrc < 1) return false; Integer senderSSRC = senderReport.ssrc; Map<Integer, SenderInformation> receiverSenderInformationMap = getReceiverSenderInformationMap(senderSSRC); Channel srcChannel = conf .findChannelByReceiveSSRC(ssrc, MediaType.VIDEO); if (srcChannel == null || !(srcChannel instanceof RtpChannel)) return false; RtpChannel srcRtpChannel = (RtpChannel)srcChannel; // Send to every channel that receives this sender an SR. for (Content content : conf.getContents()) { if (MediaType.VIDEO.equals(content.getMediaType())) { for (Channel destChannel : content.getChannels()) { if (!(destChannel instanceof RtpChannel) || srcRtpChannel == destChannel) continue; RtpChannel destRtpChannel = (RtpChannel) destChannel; MediaStream stream = destRtpChannel.getStream(); if (stream == null) continue; boolean destIsReceiving = srcRtpChannel.isInLastN(destChannel); if (destIsReceiving && srcRtpChannel instanceof VideoChannel) { VideoChannel srcVideoChannel = (VideoChannel) srcRtpChannel; if (!(destChannel instanceof VideoChannel)) { destIsReceiving = false; } else { VideoChannel destVideoChannel = (VideoChannel) destChannel; destIsReceiving = destVideoChannel.getSimulcastManager() .acceptSimulcastLayer(ssrc, srcVideoChannel); } } explodeSenderReport(destIsReceiving, outPacket, senderReport, rtpTranslatorImpl, senderSSRC, receiverSenderInformationMap, stream); } Recorder recorder = content.getRecorder(); MediaStream s; if (content.isRecording()) { if (recorder != null && (s = recorder.getMediaStream()) != null) { explodeSenderReport(true, outPacket, senderReport, rtpTranslatorImpl, senderSSRC, receiverSenderInformationMap, s); } } } } return true; } private void explodeSenderReport(boolean destIsReceiving, RTCPCompoundPacket outPacket, RTCPSRPacket senderReport, RTPTranslatorImpl rtpTranslatorImpl, Integer senderSSRC, Map<Integer, SenderInformation> receiverSenderInformationMap, MediaStream stream) { // "Clone" the SR. 
RTCPSRPacket sr = new RTCPSRPacket( senderSSRC, new RTCPReportBlock[0]); sr.ntptimestampmsw = senderReport.ntptimestampmsw; sr.ntptimestamplsw = senderReport.ntptimestamplsw; sr.rtptimestamp = senderReport.rtptimestamp; sr.octetcount = senderReport.octetcount; sr.packetcount = senderReport.packetcount; Integer receiverSSRC = (int) stream.getLocalSourceID(); if (destIsReceiving) { // The sender is being received by this receiver: // Cache the sender information. SenderInformation si = new SenderInformation(); si.octetCount = senderReport.octetcount; si.packetCount = senderReport.packetcount; synchronized (receiverSenderInformationMap) { receiverSenderInformationMap.put(receiverSSRC, si); } } else { // The sender is NOT being received by this receiver: // We keep the packet count/octet count stable. SenderInformation si; synchronized (receiverSenderInformationMap) { if (receiverSenderInformationMap .containsKey(receiverSSRC)) { si = receiverSenderInformationMap .get(receiverSSRC); } else { si = null; } } if (si != null) { sr.packetcount = si.packetCount; sr.octetcount = si.octetCount; } else { sr.packetcount = 0L; sr.octetcount = 0L; } } // Send the SR to the receiver. RTCPPacket[] packets = new RTCPPacket[outPacket.packets.length]; packets[0] = sr; System.arraycopy( outPacket.packets, 1, packets, 1, outPacket.packets.length - 1); RTCPCompoundPacket compoundPacket = new RTCPCompoundPacket(packets); Payload payload = new RTCPPacketPayload(compoundPacket); rtpTranslatorImpl.writeControlPayload(payload, stream); } private Map<Integer, SenderInformation> getReceiverSenderInformationMap( Integer senderSSRC) { Map<Integer, SenderInformation> receiverSenderInformationMap; synchronized (lastSenderInformationMap) { if (lastSenderInformationMap.containsKey(senderSSRC)) { receiverSenderInformationMap = lastSenderInformationMap.get(senderSSRC); } else { receiverSenderInformationMap = new HashMap<Integer, SenderInformation>(); lastSenderInformationMap.put(senderSSRC, receiverSenderInformationMap); } } return receiverSenderInformationMap; } private static class SenderInformation { long packetCount; long octetCount; } }
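/*
 * Illustrative sketch, not part of Jitsi Videobridge: a stand-alone model of the
 * counter-freezing idea used by explodeSenderReport above. While a receiver is receiving a
 * sender, the latest packet/octet counts from the SR are cached per (sender, receiver) pair;
 * once it stops receiving, the cached values are replayed so the counters stay stable.
 * All class and method names here are hypothetical.
 */
import java.util.HashMap;
import java.util.Map;

public class SenderReportCounterExample {

    static final class Counters {
        final long packetCount;
        final long octetCount;
        Counters(long packetCount, long octetCount) {
            this.packetCount = packetCount;
            this.octetCount = octetCount;
        }
    }

    // (senderSSRC -> (receiverSSRC -> last counters forwarded to that receiver))
    private final Map<Long, Map<Long, Counters>> lastSent = new HashMap<>();

    /** Returns the counters to place in the SR sent to receiverSSRC about senderSSRC. */
    public synchronized Counters countersFor(long senderSSRC, long receiverSSRC,
                                             boolean receiverIsReceiving,
                                             long currentPackets, long currentOctets) {
        Map<Long, Counters> perReceiver = lastSent.computeIfAbsent(senderSSRC, k -> new HashMap<>());
        if (receiverIsReceiving) {
            // Receiving: forward the live counters and remember them.
            Counters fresh = new Counters(currentPackets, currentOctets);
            perReceiver.put(receiverSSRC, fresh);
            return fresh;
        }
        // Not receiving: replay the last cached values, or zeros if none were cached yet.
        return perReceiver.getOrDefault(receiverSSRC, new Counters(0L, 0L));
    }
}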
package org.musetest.core.resultstorage; import org.musetest.core.*; import org.musetest.core.context.*; import org.musetest.core.events.*; import org.musetest.core.plugins.*; import org.musetest.core.resource.*; import org.musetest.core.suite.*; import org.musetest.core.values.*; import java.io.*; public class LocalStorageLocationPlugin extends GenericConfigurableTestPlugin implements LocalStorageLocationProvider { LocalStorageLocationPlugin(LocalStorageLocationPluginConfiguration configuration) { super(configuration); } @Override public void initialize(MuseExecutionContext context) throws MuseInstantiationException, ValueSourceResolutionError { if (_initialized) return; _context = context; _initialized = true; MuseValueSource output_folder_source = BaseValueSource.getValueSource(_configuration.parameters(), LocalStorageLocationPluginConfiguration.BASE_LOCATION_PARAM_NAME, true, context.getProject()); String output_folder_path = BaseValueSource.getValue(output_folder_source, context, false, String.class); _output_folder = new File(output_folder_path); if (!_output_folder.exists()) if (!_output_folder.mkdirs()) { final MuseEvent event = MessageEventType.create(String.format("Unable to create output folder (%s). Results will not be stored.", output_folder_path)); event.addTag(MuseEvent.ERROR); context.raiseEvent(event); _output_folder = null; } if (_output_folder != null) context.raiseEvent(MessageEventType.create(String.format("Will store results locally at %s.", _output_folder.getAbsolutePath()))); } @Override protected boolean applyToContextType(MuseExecutionContext context) { if (Plugins.findType(this.getClass(), context) != null) return false; return context instanceof TestSuiteExecutionContext || context instanceof TestExecutionContext; } @Override public File getBaseFolder() { return _output_folder; } @Override synchronized public File getTestFolder(TestExecutionContext context) { final File folder = new File(_output_folder, context.getTestExecutionId()); if (!folder.exists()) if (!folder.mkdir()) _context.raiseEvent(MessageEventType.create(String.format("Unable to create output folder (%s). Results will not be stored.", folder.getAbsolutePath()))); return folder; } private MuseExecutionContext _context; private File _output_folder = null; private boolean _initialized = false; }
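/*
 * Illustrative sketch, not part of MuseIDE: shows how a consumer of the
 * LocalStorageLocationProvider interface implemented above might save a result file into
 * the per-test folder. It assumes the interface declares the getTestFolder() method
 * implemented above; the writer class and the file name are hypothetical.
 */
package org.musetest.core.resultstorage;

import org.musetest.core.context.*;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class TestResultFileWriterExample {

    /** Writes a small summary file into the folder reserved for this test execution. */
    public static File writeSummary(LocalStorageLocationProvider locations,
                                    TestExecutionContext test,
                                    String summary) throws IOException {
        // The plugin above creates the per-test folder on demand.
        File folder = locations.getTestFolder(test);
        File out = new File(folder, "summary.txt");
        try (FileWriter writer = new FileWriter(out)) {
            writer.write(summary);
        }
        return out;
    }
}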
package com.okta.tools.authentication; import com.okta.tools.OktaAwsCliEnvironment; import com.okta.tools.models.AuthResult; import org.apache.http.HttpStatus; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONObject; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Scanner; import java.util.stream.Stream; public final class OktaAuthentication { private static final Logger logger = LogManager.getLogger(OktaAuthentication.class); private OktaAwsCliEnvironment environment; public OktaAuthentication(OktaAwsCliEnvironment environment) { this.environment = environment; } private static String missingProperty = "Could not find the expected property \"%s\" in the response message."; /** * Performs primary and secondary (2FA) authentication, then returns a session token * * @return The session token * @throws IOException If an error occurs during the api call or during the processing of the result. */ public String getOktaSessionToken() throws IOException { // Returns an Okta Authentication Transaction object. // See: https://developer.okta.com/docs/api/resources/authn#authentication-transaction-model JSONObject primaryAuthResult = new JSONObject(getPrimaryAuthResponse(environment.oktaOrg)); // "statusProperty" = The current state of the authentication transaction. final String statusProperty = "status"; // "sessionProperty" = An ephemeral one-time token used to bootstrap an Okta session. final String sessionProperty = "sessionToken"; // Sanity check: Does the (required) status property exist? if (!primaryAuthResult.has(statusProperty)) { throw makeException(primaryAuthResult, missingProperty, statusProperty); } // Validate status value. TransactionState state; try { state = TransactionState.valueOf(primaryAuthResult.getString(statusProperty).toUpperCase()); } catch (IllegalArgumentException e) { throw makeException( primaryAuthResult, e, "The response message contained an unrecognized value \"%s\" for property \"%s\".", primaryAuthResult.getString(statusProperty), statusProperty); } // Handle the response switch(state) { // Handled States case INVALID: throw new IllegalStateException("Invalid value - should never happen."); case MFA_REQUIRED: // Handle second-factor return OktaMFA.promptForFactor(primaryAuthResult); case SUCCESS: if (primaryAuthResult.has(sessionProperty)) { return primaryAuthResult.getString(sessionProperty); } else { throw makeException(primaryAuthResult, missingProperty, sessionProperty); } // Unhandled States // If support for handling a new state is added, move to the 'Handled' block and keep the // values sorted in the order given by TransactionState.java. case UNAUTHENTICATED: case PASSWORD_WARN: case PASSWORD_EXPIRED: case RECOVERY: case RECOVERY_CHALLENGE: case PASSWORD_RESET: case LOCKED_OUT: case MFA_ENROLL: case MFA_ENROLL_ACTIVATE: case MFA_CHALLENGE: default: throw makeException( primaryAuthResult, "Handling for the received status code is not currently implemented.\n%s: %s", state.toString(), state.getDescription()); } } /** * Performs primary authentication and parses the response. 
* * @param oktaOrg The org to authenticate against * @return The response of the authentication * @throws IOException If an error occurs during the api call or during the processing of the result. */ private String getPrimaryAuthResponse(String oktaOrg) throws IOException { while (true) { AuthResult response = primaryAuthentication(getUsername(), getPassword(), oktaOrg); int requestStatus = response.statusLine.getStatusCode(); primaryAuthFailureHandler(requestStatus, oktaOrg); if (requestStatus == HttpStatus.SC_OK) { return response.responseContent; } if (environment.oktaPassword != null) { throw new IllegalStateException("Stored username or password is invalid."); } } } /** * Perform primary authentication against Okta * * @param username The username of the user * @param password The password of the user * @param oktaOrg The org to perform auth against * @return The authentication result * @throws IOException If an error occurs during the api call or during the processing of the result. */ private AuthResult primaryAuthentication(String username, String password, String oktaOrg) throws IOException { // Okta authn API docs: https://developer.okta.com/docs/api/resources/authn#primary-authentication HttpPost httpPost = new HttpPost("https://" + oktaOrg + "/api/v1/authn"); httpPost.addHeader("Accept", "application/json"); httpPost.addHeader("Content-Type", "application/json"); httpPost.addHeader("Cache-Control", "no-cache"); JSONObject authnRequest = new JSONObject(); authnRequest.put("username", username); authnRequest.put("password", password); StringEntity entity = new StringEntity(authnRequest.toString(), StandardCharsets.UTF_8); entity.setContentType("application/json"); httpPost.setEntity(entity); try (CloseableHttpClient httpClient = HttpClients.createSystem()) { CloseableHttpResponse authnResponse = httpClient.execute(httpPost); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(65536); authnResponse.getEntity().writeTo(byteArrayOutputStream); return new AuthResult(authnResponse.getStatusLine(), byteArrayOutputStream.toString()); } } /** * Handles failures during the primary authentication flow * * @param responseStatus The status of the response * @param oktaOrg The org against which authentication was performed */ private void primaryAuthFailureHandler(int responseStatus, String oktaOrg) { if (responseStatus == 400 || responseStatus == 401) { logger.error("Invalid username or password."); } else if (responseStatus == 500) { logger.error("\nUnable to establish connection with: " + oktaOrg + " \nPlease verify that your Okta org url is correct and try again"); } else if (responseStatus != 200) { throw new RuntimeException("Failed : HTTP error code : " + responseStatus); } } private String getUsername() { if (environment.oktaUsername == null || environment.oktaUsername.isEmpty()) { System.out.print("Username: "); return new Scanner(System.in).next(); } else { System.out.println("Username: " + environment.oktaUsername); return environment.oktaUsername; } } private String getPassword() { if (environment.oktaPassword == null || environment.oktaPassword.isEmpty()) { return promptForPassword(); } else { return environment.oktaPassword; } } private String promptForPassword() { if (System.console() == null) { // hack to be able to debug in an IDE System.out.print("Password: "); return new Scanner(System.in).next(); } else { return new String(System.console().readPassword("Password: ")); } } private RuntimeException makeException(JSONObject primaryAuthResult, String 
template, Object... args) { return makeException(primaryAuthResult, null, template, args); } // Create an exception by formatting a string with arguments and appending the json message. private RuntimeException makeException(JSONObject primaryAuthResult, Exception e, String template, Object... args) { // Add the formatted json message to the output. template = template + "\n\nMessage:\n%s\n"; String responseJson = primaryAuthResult.toString(2); Collection<Object> argsWithMessageJson = new ArrayList<>(); Stream.of(args).forEach(argsWithMessageJson::add); argsWithMessageJson.add(responseJson); if (e != null) { return new IllegalStateException(String.format(template, argsWithMessageJson.toArray()), e); } else { return new IllegalStateException(String.format(template, argsWithMessageJson.toArray())); } } }
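/*
 * Usage sketch (not part of the original sources): one way a caller might drive the
 * OktaAuthentication class above. Only the constructor and getOktaSessionToken() come from that
 * class; how the OktaAwsCliEnvironment instance is obtained is left to the caller and is an
 * assumption here.
 */
class OktaSessionTokenExample {
    static String fetchSessionToken(com.okta.tools.OktaAwsCliEnvironment environment) throws java.io.IOException {
        // Runs primary authentication (and the second factor when Okta reports MFA_REQUIRED)
        // and returns the ephemeral session token used to bootstrap an Okta session.
        OktaAuthentication authentication = new OktaAuthentication(environment);
        return authentication.getOktaSessionToken();
    }
}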
package org.nschmidt.ldparteditor.data; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashSet; import java.util.Locale; import java.util.Set; import java.util.TreeSet; import org.lwjgl.util.vector.Vector3f; import org.nschmidt.ldparteditor.helpers.composite3d.RectifierSettings; import org.nschmidt.ldparteditor.helpers.math.Vector3d; import org.nschmidt.ldparteditor.logger.NLogger; public class VM25RectangleSnap extends VM24MeshReducer { private TreeSet<Vertex> selectedVerticesBackup = new TreeSet<Vertex>(); private final Vertex[] NO_SOLUTION = new Vertex[]{}; protected VM25RectangleSnap(DatFile linkedDatFile) { super(linkedDatFile); } public void snapRectanglePrimitives() { ArrayList<GData1> rectPrims = new ArrayList<GData1>(); final HashSet<String> RECT_NAMES = new HashSet<String>(); RECT_NAMES.add("rect1.dat"); //$NON-NLS-1$ RECT_NAMES.add("rect2a.dat"); //$NON-NLS-1$ RECT_NAMES.add("rect2p.dat"); //$NON-NLS-1$ RECT_NAMES.add("rect3.dat"); //$NON-NLS-1$ RECT_NAMES.add("rect.dat"); //$NON-NLS-1$ for (GData1 g : selectedSubfiles) { if (RECT_NAMES.contains(g.shortName.toLowerCase(Locale.ENGLISH))) { rectPrims.add(g); } } for (GData1 rect : rectPrims) { Set<VertexInfo> vis = lineLinkedToVertices.get(rect); if (vis != null) { for (VertexInfo vi : vis) { selectedVertices.remove(vi.vertex); } } } selectedVerticesBackup.addAll(selectedVertices); clearSelection(); for (GData1 rect : rectPrims) { snap(rect); } if (!rectPrims.isEmpty()) { setModified(true, true); } } private void snap(GData1 rect) { // 1. Took all vertices in order from the rectangle-primitive Vertex[] verts = null; { Set<VertexInfo> vis = lineLinkedToVertices.get(rect); if (vis == null) return; for (VertexInfo vi : vis) { if (vi.linkedData.type() == 4) { NLogger.debug(getClass(), vi.position); verts = quads.get(vi.linkedData); break; } } } if (verts == null) return; // 2. Get a possible (and valid) new 4-vertex loop from the vertices of the rectangle Vertex[] loop = getLoop(verts); if (loop == null) return; // 3. Rectify the loop (with rectangle-primitives), delete the old rectangle primitive rectify(loop, rect); } private Vertex[] getLoop(Vertex[] verts) { Vertex[][] loops = new Vertex[4][]; for (int i = 0; i < 4; i++) { loops[i] = getLoop(verts[i], verts); } { float minDist = Float.MAX_VALUE; for (int i = 0; i < 4; i++) { if (loops[i] != null && loops[i][0].x < minDist) minDist = loops[i][0].x; } for (int i = 0; i < 4; i++) { if (loops[i] != null && loops[i][0].x == minDist){ verts[0] = loops[i][1]; verts[1] = loops[i][2]; verts[2] = loops[i][3]; verts[3] = loops[i][4]; break; } } } return verts; } private Vertex[] getLoop(Vertex vert, Vertex[] verts) { // FIXME Needs implementation for issue #230! 
Vertex[] result = new Vertex[5]; TreeSet<Vertex> validVertices = new TreeSet<Vertex>(); if (selectedVerticesBackup.isEmpty()) { validVertices.addAll(getVertices()); } else { validVertices.addAll(selectedVerticesBackup); } for (int i = 0; i < 4; i++) { validVertices.remove(verts[i]); } boolean isConnectedToModel = getLinkedSurfaces(vert).size() > 1; BigDecimal minDistance = new BigDecimal("10000000000000"); //$NON-NLS-1$ // If the vertex is not connected to other surfaces, search for the closest vertex if (!isConnectedToModel) { Vertex vert2 = vert; for (Vertex v : validVertices) { Vector3d v1 = new Vector3d(v); Vector3d v2 = new Vector3d(vert); BigDecimal distance = Vector3d.distSquare(v1, v2); if (distance.compareTo(minDistance) < 0) { minDistance = distance; vert2 = v; } } vert = vert2; } Vertex[] loop = getLoop(vert, validVertices); if (loop != null) { result[0] = new Vertex(minDistance, BigDecimal.ZERO, BigDecimal.ZERO); result[1] = loop[0]; result[2] = loop[1]; result[3] = loop[2]; result[4] = loop[3]; } return result; } private Vertex[] getLoop(Vertex vert, TreeSet<Vertex> validVertices) { return getLoop(vert, new Vertex[4], 0, validVertices); } private Vertex[] getLoop(Vertex vert, Vertex[] verts, int depth, TreeSet<Vertex> validVertices) { if (depth > 3) { return null; } Vertex backup = verts[depth]; verts[depth] = vert; NLogger.debug(getClass(), "Depth: " + depth + " " + verts[0] + " " + verts[1] + " " + verts[2] + " " + verts[3]); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ TreeSet<Vertex> surfVerts = new TreeSet<Vertex>(); { HashSet<GData> surfs = getLinkedSurfaces(vert); for (GData g : surfs) { int type = g.type(); if (type == 3) { Vertex[] t = triangles.get(g); if (t == null) continue; surfVerts.add(t[0]); surfVerts.add(t[1]); surfVerts.add(t[2]); } else if (type == 4) { Vertex[] t = quads.get(g); if (t == null) continue; surfVerts.add(t[0]); surfVerts.add(t[1]); surfVerts.add(t[2]); surfVerts.add(t[3]); } } } boolean finishedLoopSearch = false; if (depth == 3) { finishedLoopSearch = surfVerts.contains(verts[0]); } for (int i = 0; i < (depth + 1); i++) { surfVerts.remove(verts[i]); } boolean foundSolution = false; if (finishedLoopSearch) { HashSet<GData> s1 = getLinkedSurfaces(verts[0]); HashSet<GData> s2 = getLinkedSurfaces(verts[1]); HashSet<GData> s3 = getLinkedSurfaces(verts[2]); HashSet<GData> s4 = getLinkedSurfaces(verts[3]); s1.retainAll(s2); s1.retainAll(s3); s1.retainAll(s4); if (!s1.isEmpty()) { verts[depth] = backup; return NO_SOLUTION; } } else { for (Vertex v : surfVerts) { Vertex[] solution = getLoop(v, verts, (depth + 1), validVertices); if (solution != NO_SOLUTION) { foundSolution = true; } } if (depth > 0 && !foundSolution) { verts[depth] = backup; } } if (verts[0] != null && verts[1] != null && verts[2] != null && verts[3] != null) { return verts; } else { return NO_SOLUTION; } } private void rectify(Vertex[] loop, GData1 g) { final Vector3f[] normals = new Vector3f[] { new Vector3f(), new Vector3f(), new Vector3f(), new Vector3f() }; { Vertex v1 = loop[0]; Vertex v2 = loop[1]; Vertex v3 = loop[2]; Vertex v4 = loop[3]; final Vector3f[] lineVectors = new Vector3f[] { new Vector3f(), new Vector3f(), new Vector3f(), new Vector3f() }; Vector3f.sub(new Vector3f(v2.x, v2.y, v2.z), new Vector3f(v1.x, v1.y, v1.z), lineVectors[0]); Vector3f.sub(new Vector3f(v3.x, v3.y, v3.z), new Vector3f(v2.x, v2.y, v2.z), lineVectors[1]); Vector3f.sub(new Vector3f(v4.x, v4.y, v4.z), new Vector3f(v3.x, v3.y, v3.z), lineVectors[2]); Vector3f.sub(new 
Vector3f(v1.x, v1.y, v1.z), new Vector3f(v4.x, v4.y, v4.z), lineVectors[3]); Vector3f.cross(lineVectors[0], lineVectors[1], normals[0]); Vector3f.cross(lineVectors[1], lineVectors[2], normals[1]); Vector3f.cross(lineVectors[2], lineVectors[3], normals[2]); Vector3f.cross(lineVectors[3], lineVectors[0], normals[3]); } Vector3f normal = new Vector3f(); for (int i = 0; i < 4; i++) { Vector3f.add(normals[i], normal, normal); } GData4 quad = new GData4( g.colourNumber, g.r, g.g, g.b, g.a, loop[0].X, loop[0].Y, loop[0].Z, loop[1].X, loop[1].Y, loop[1].Z, loop[2].X, loop[2].Y, loop[2].Z, loop[3].X, loop[3].Y, loop[3].Z, new Vector3d(new BigDecimal(normal.x), new BigDecimal(normal.y), new BigDecimal(normal.z)), g.parent, linkedDatFile); linker(g, quad); selectedData.add(quad); selectedQuads.add(quad); RectifierSettings rs = new RectifierSettings(); rs.setScope(1); rectify(rs, false, false); selectedData.clear(); selectedQuads.clear(); } }
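/*
 * Illustrative sketch (an assumption, not taken from the original sources): VM25RectangleSnap is
 * one link in LDPartEditor's VertexManager inheritance chain, so snapRectanglePrimitives() is
 * normally reached through the vertex manager attached to a DatFile rather than by constructing
 * this class directly. The accessor name below is hypothetical.
 */
// VertexManager vm = datFile.getVertexManager(); // hypothetical accessor on DatFile
// vm.snapRectanglePrimitives();                  // snaps the selected rect*.dat primitives onto the surrounding mesh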
// checkstyle: Checks Java source code for adherence to a set of rules.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package com.puppycrawl.tools.checkstyle.gui;

import com.puppycrawl.tools.checkstyle.api.DetailAST;

import java.awt.Color;
import java.util.List;

import javax.swing.JTextArea;

/**
 * Helper class to select in the editor the piece of code that corresponds to an AST node.
 */
public class CodeSelector {
    /** AST node whose source text should be selected. */
    private final DetailAST ast;
    /** Editor in which the selection is made. */
    private final JTextArea editor;
    /** Mapping from a line number to the offset of that line's first character. */
    private final List<Integer> lines2position;

    /**
     * Constructor.
     *
     * @param ast the AST node to select
     * @param editor the editor holding the source text
     * @param lines2position mapping from line numbers to character positions
     */
    public CodeSelector(final DetailAST ast, final JTextArea editor,
                        final List<Integer> lines2position) {
        this.ast = ast;
        this.editor = editor;
        this.lines2position = lines2position;
    }

    /**
     * Sets the selection range from the AST node's line and column.
     */
    public void select() {
        int start = lines2position.get(ast.getLineNo()) + ast.getColumnNo();
        int end = findLastPosition(ast);

        editor.setSelectedTextColor(Color.blue);
        editor.requestFocusInWindow();
        editor.setSelectionStart(start);
        editor.setSelectionEnd(end);
        editor.transferFocusBackward();
    }

    /** Returns the text position just after the last (deepest, right-most) child of the node. */
    private int findLastPosition(final DetailAST ast) {
        if (ast.getChildCount() == 0) {
            return lines2position.get(ast.getLineNo()) + ast.getColumnNo() + ast.getText().length();
        }
        else {
            return findLastPosition(ast.getLastChild());
        }
    }
}
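/*
 * Usage sketch (not part of the original sources): highlights the source text covered by a single
 * DetailAST node in a JTextArea. The ast, textArea and lines2position values are assumed to come
 * from the surrounding checkstyle GUI; only the CodeSelector API above is used.
 */
class CodeSelectorExample {
    static void highlightNode(com.puppycrawl.tools.checkstyle.api.DetailAST ast,
                              javax.swing.JTextArea textArea,
                              java.util.List<Integer> lines2position) {
        // lines2position maps a line number to the offset of that line's first character in the text area.
        new CodeSelector(ast, textArea, lines2position).select();
    }
}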
package com.chiralbehaviors.CoRE.product; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.Iterator; import java.util.Set; import javax.persistence.TypedQuery; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.chiralbehaviors.CoRE.agency.Agency; import com.chiralbehaviors.CoRE.attribute.Attribute; import com.chiralbehaviors.CoRE.attribute.ValueType; import com.chiralbehaviors.CoRE.attribute.unit.Unit; import com.chiralbehaviors.CoRE.test.DatabaseTest; /** * @author hhildebrand * */ public class ProductTest extends DatabaseTest { private static final Logger LOG = LoggerFactory.getLogger(ProductTest.class); @Override @After public void after() { em.getTransaction() .rollback(); em.clear(); } @Test public void createEntity() { TypedQuery<Agency> query = em.createNamedQuery("agency.findByName", Agency.class) .setParameter("name", "CoREd"); Agency r = query.getSingleResult(); LOG.debug(String.format("Agency: %s", r)); assertNotNull("Agency was null!", r); assertEquals("CoREd", r.getName()); Product b = new Product(); String name = "New Product"; b.setName(name); b.setDescription("An Product created solely for testing purposes"); b.setUpdatedBy(r); em.persist(b); em.flush(); // Now check to see that the Product you just made actually got into // the database. em.clear(); TypedQuery<Product> productQuery = em.createNamedQuery("product.findByName", Product.class) .setParameter("name", name); Product b2 = productQuery.getSingleResult(); assertNotNull("Retrieved Product was null!", b2); assertTrue(b != b2); assertEquals(b, b2); } @Before public void initData() { Agency core = new Agency("CoREd"); core.setUpdatedBy(core); em.persist(core); Product peptideFoo = new Product("Peptide Foo", "The Foo peptide is lethal! Do not eat!", core); em.persist(peptideFoo); Product peptideBar = new Product("Peptide Bar", "The Foo peptide is lethal! 
Do not eat!", core); em.persist(peptideBar); Attribute diagram = new Attribute("Diagram", "The D3 Net of the molecule", ValueType.JSON, core); em.persist(diagram); Unit aminoAcids = new Unit("Amino Acids", "A unit of length for protein primary sequences", core); aminoAcids.setAbbreviation("aa"); em.persist(aminoAcids); ProductAttribute attribute = new ProductAttribute(peptideFoo, diagram, core); attribute.setUnit(aminoAcids); attribute.setValue("Fooled ya"); em.persist(attribute); em.flush(); } @SuppressWarnings("boxing") @Test public void testAttributes() { TypedQuery<Product> findProduct = em.createNamedQuery("product.findByName", Product.class) .setParameter("name", "Peptide Foo"); Product b = findProduct.getSingleResult(); assertNotNull(b); assertEquals(b.getName(), "Peptide Foo"); LOG.debug(String.format("Product is: %s", b)); TypedQuery<Attribute> findAttribute = em.createNamedQuery("attribute.findByName", Attribute.class) .setParameter("name", "Diagram"); Attribute a = findAttribute.getSingleResult(); assertNotNull(a); assertEquals(a.getName(), "Diagram"); LOG.debug(String.format("Attribute is: %s", a)); em.refresh(b); Set<ProductAttribute> productAttributes = b.getAttributes(); assertNotNull(productAttributes); assertEquals(1, productAttributes.size()); Iterator<ProductAttribute> iter = productAttributes.iterator(); ProductAttribute bea = iter.next(); assertNotNull(bea); assertEquals(b, bea.getProduct()); assertEquals(a, bea.getAttribute()); assertEquals("Fooled ya", bea.getValue()); } }
package org.nutz.mvc.adaptor.injector;

import java.lang.reflect.Field;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.nutz.lang.Mirror;
import org.nutz.lang.Strings;
import org.nutz.lang.inject.Injecting;
import org.nutz.mvc.adaptor.ParamInjector;
import org.nutz.mvc.annotation.Param;

public class ObjectPairInjector implements ParamInjector {

    private Injecting[] injs;
    private String[] names;
    private Mirror<?> mirror;

    public ObjectPairInjector(String prefix, Class<?> type) {
        prefix = Strings.isBlank(prefix) ? "" : Strings.trim(prefix);
        this.mirror = Mirror.me(type);
        Field[] fields = mirror.getFields();
        this.injs = new Injecting[fields.length];
        this.names = new String[fields.length];
        for (int i = 0; i < fields.length; i++) {
            Field f = fields[i];
            this.injs[i] = mirror.getInjecting(f.getName());
            // The request parameter name is the prefix plus either the @Param value or the field name.
            Param param = f.getAnnotation(Param.class);
            String nm = null == param ? f.getName() : param.value();
            this.names[i] = prefix + nm;
        }
    }

    public Object get(HttpServletRequest req, HttpServletResponse resp, Object refer) {
        Object obj = mirror.born();
        for (int i = 0; i < injs.length; i++) {
            Injecting inj = injs[i];
            String s = req.getParameter(names[i]);
            // Skip absent parameters entirely; inject blank values as null.
            if (null == s)
                continue;
            if (Strings.isBlank(s))
                s = null;
            inj.inject(obj, s);
        }
        return obj;
    }
}
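/*
 * Usage sketch (not part of the original sources): binds request parameters onto a plain bean via
 * the ObjectPairInjector above. The Pet bean and the "pet." prefix are made up for illustration;
 * with a request carrying pet.name=Rex and pet.age=3, bind(...) returns a populated Pet instance.
 */
class ObjectPairInjectorExample {
    public static class Pet {
        public String name; // filled from the "pet.name" request parameter
        public String age;  // filled from the "pet.age" request parameter
    }

    static Pet bind(javax.servlet.http.HttpServletRequest req,
                    javax.servlet.http.HttpServletResponse resp) {
        // A @Param annotation on a field would replace the field name in the parameter lookup,
        // as the ObjectPairInjector constructor shows.
        ObjectPairInjector injector = new ObjectPairInjector("pet.", Pet.class);
        return (Pet) injector.get(req, resp, null);
    }
}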
package com.raoulvdberge.refinedstorage.block; import com.raoulvdberge.refinedstorage.api.network.node.INetworkNode; import com.raoulvdberge.refinedstorage.apiimpl.network.node.ICoverable; import com.raoulvdberge.refinedstorage.apiimpl.network.node.cover.Cover; import com.raoulvdberge.refinedstorage.apiimpl.network.node.cover.CoverManager; import com.raoulvdberge.refinedstorage.apiimpl.network.node.cover.CoverType; import com.raoulvdberge.refinedstorage.block.info.BlockInfoBuilder; import com.raoulvdberge.refinedstorage.block.info.IBlockInfo; import com.raoulvdberge.refinedstorage.block.property.PropertyObject; import com.raoulvdberge.refinedstorage.capability.CapabilityNetworkNodeProxy; import com.raoulvdberge.refinedstorage.render.IModelRegistration; import com.raoulvdberge.refinedstorage.render.collision.AdvancedRayTraceResult; import com.raoulvdberge.refinedstorage.render.collision.AdvancedRayTracer; import com.raoulvdberge.refinedstorage.render.collision.CollisionGroup; import com.raoulvdberge.refinedstorage.render.collision.constants.ConstantsCable; import com.raoulvdberge.refinedstorage.render.model.baked.BakedModelCableCover; import com.raoulvdberge.refinedstorage.render.model.baked.BakedModelFullbright; import com.raoulvdberge.refinedstorage.tile.TileBase; import com.raoulvdberge.refinedstorage.tile.TileCable; import com.raoulvdberge.refinedstorage.tile.TileNode; import com.raoulvdberge.refinedstorage.util.CollisionUtils; import net.minecraft.block.SoundType; import net.minecraft.block.material.Material; import net.minecraft.block.properties.PropertyBool; import net.minecraft.block.state.BlockFaceShape; import net.minecraft.block.state.BlockStateContainer; import net.minecraft.block.state.IBlockState; import net.minecraft.client.renderer.block.model.ModelResourceLocation; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.BlockRenderLayer; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.RayTraceResult; import net.minecraft.util.math.Vec3d; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import net.minecraftforge.common.property.IExtendedBlockState; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import java.util.ArrayList; import java.util.List; public class BlockCable extends BlockNode { public static final PropertyObject<Cover> COVER_NORTH = new PropertyObject<>("cover_north", Cover.class); public static final PropertyObject<Cover> COVER_EAST = new PropertyObject<>("cover_east", Cover.class); public static final PropertyObject<Cover> COVER_SOUTH = new PropertyObject<>("cover_south", Cover.class); public static final PropertyObject<Cover> COVER_WEST = new PropertyObject<>("cover_west", Cover.class); public static final PropertyObject<Cover> COVER_UP = new PropertyObject<>("cover_up", Cover.class); public static final PropertyObject<Cover> COVER_DOWN = new PropertyObject<>("cover_down", Cover.class); private static final PropertyBool NORTH = PropertyBool.create("north"); private static final PropertyBool EAST = PropertyBool.create("east"); private static final PropertyBool SOUTH = PropertyBool.create("south"); private static final PropertyBool WEST = PropertyBool.create("west"); private static final PropertyBool UP = PropertyBool.create("up"); private static final PropertyBool DOWN = 
PropertyBool.create("down"); public BlockCable(IBlockInfo info) { super(info); } public BlockCable() { super(createBuilder("cable").tileEntity(TileCable::new).create()); } static BlockInfoBuilder createBuilder(String id) { return BlockInfoBuilder.forId(id).material(Material.GLASS).soundType(SoundType.GLASS).hardness(0.35F); } @SideOnly(Side.CLIENT) void registerCover(IModelRegistration modelRegistration) { modelRegistration.addBakedModelOverride(info.getId(), BakedModelCableCover::new); } @SideOnly(Side.CLIENT) void registerCoverAndFullbright(IModelRegistration modelRegistration, String... textures) { modelRegistration.addBakedModelOverride(info.getId(), base -> new BakedModelCableCover(new BakedModelFullbright(base, textures))); } @Override @SideOnly(Side.CLIENT) public void registerModels(IModelRegistration modelRegistration) { modelRegistration.setModel(this, 0, new ModelResourceLocation(info.getId(), "inventory")); registerCover(modelRegistration); } @Override public boolean hasConnectedState() { return false; } @Override protected BlockStateContainer createBlockState() { return super.createBlockStateBuilder() .add(NORTH) .add(EAST) .add(SOUTH) .add(WEST) .add(UP) .add(DOWN) .add(COVER_NORTH) .add(COVER_EAST) .add(COVER_SOUTH) .add(COVER_WEST) .add(COVER_UP) .add(COVER_DOWN) .build(); } @Override @SuppressWarnings("deprecation") public IBlockState getActualState(IBlockState state, IBlockAccess world, BlockPos pos) { TileEntity tile = world.getTileEntity(pos); state = super.getActualState(state, world, pos) .withProperty(NORTH, hasConnectionWith(world, pos, this, tile, EnumFacing.NORTH)) .withProperty(EAST, hasConnectionWith(world, pos, this, tile, EnumFacing.EAST)) .withProperty(SOUTH, hasConnectionWith(world, pos, this, tile, EnumFacing.SOUTH)) .withProperty(WEST, hasConnectionWith(world, pos, this, tile, EnumFacing.WEST)) .withProperty(UP, hasConnectionWith(world, pos, this, tile, EnumFacing.UP)) .withProperty(DOWN, hasConnectionWith(world, pos, this, tile, EnumFacing.DOWN)); return state; } @Override public IBlockState getExtendedState(IBlockState state, IBlockAccess world, BlockPos pos) { IBlockState s = super.getExtendedState(state, world, pos); TileEntity tile = world.getTileEntity(pos); if (tile instanceof TileNode && ((TileNode) tile).getNode() instanceof ICoverable) { s = ((IExtendedBlockState) s).withProperty(COVER_NORTH, ((ICoverable) ((TileNode) tile).getNode()).getCoverManager().getCover(EnumFacing.NORTH)); s = ((IExtendedBlockState) s).withProperty(COVER_EAST, ((ICoverable) ((TileNode) tile).getNode()).getCoverManager().getCover(EnumFacing.EAST)); s = ((IExtendedBlockState) s).withProperty(COVER_SOUTH, ((ICoverable) ((TileNode) tile).getNode()).getCoverManager().getCover(EnumFacing.SOUTH)); s = ((IExtendedBlockState) s).withProperty(COVER_WEST, ((ICoverable) ((TileNode) tile).getNode()).getCoverManager().getCover(EnumFacing.WEST)); s = ((IExtendedBlockState) s).withProperty(COVER_UP, ((ICoverable) ((TileNode) tile).getNode()).getCoverManager().getCover(EnumFacing.UP)); s = ((IExtendedBlockState) s).withProperty(COVER_DOWN, ((ICoverable) ((TileNode) tile).getNode()).getCoverManager().getCover(EnumFacing.DOWN)); } return s; } private static boolean hasConnectionWith(IBlockAccess world, BlockPos pos, BlockBase block, TileEntity tile, EnumFacing direction) { if (!(tile instanceof TileNode)) { return false; } INetworkNode node = ((TileNode) tile).getNode(); if (node instanceof ICoverable) { Cover cover = ((ICoverable) node).getCoverManager().getCover(direction); if (cover != 
null && cover.getType() != CoverType.HOLLOW) { return false; } } TileEntity otherTile = world.getTileEntity(pos.offset(direction)); if (otherTile instanceof TileNode && ((TileNode) otherTile).getNode() instanceof ICoverable) { Cover cover = ((ICoverable) ((TileNode) otherTile).getNode()).getCoverManager().getCover(direction.getOpposite()); if (cover != null && cover.getType() != CoverType.HOLLOW) { return false; } } if (otherTile != null && otherTile.hasCapability(CapabilityNetworkNodeProxy.NETWORK_NODE_PROXY_CAPABILITY, direction.getOpposite())) { // Prevent the block adding connections in itself // For example: importer cable connection on the importer face if (block.getDirection() != null && ((TileBase) tile).getDirection() == direction) { return false; } return true; } return false; } protected boolean canAccessGui(IBlockState state, World world, BlockPos pos, float hitX, float hitY, float hitZ) { state = getActualState(state, world, pos); for (CollisionGroup group : getCollisions(world.getTileEntity(pos), state)) { if (group.canAccessGui()) { for (AxisAlignedBB aabb : group.getItems()) { if (CollisionUtils.isInBounds(aabb, hitX, hitY, hitZ)) { return true; } } } } return false; } public List<CollisionGroup> getCollisions(TileEntity tile, IBlockState state) { List<CollisionGroup> groups = getCoverCollisions(tile); groups.add(ConstantsCable.CORE); if (state.getValue(NORTH)) { groups.add(ConstantsCable.NORTH); } if (state.getValue(EAST)) { groups.add(ConstantsCable.EAST); } if (state.getValue(SOUTH)) { groups.add(ConstantsCable.SOUTH); } if (state.getValue(WEST)) { groups.add(ConstantsCable.WEST); } if (state.getValue(UP)) { groups.add(ConstantsCable.UP); } if (state.getValue(DOWN)) { groups.add(ConstantsCable.DOWN); } return groups; } private List<CollisionGroup> getCoverCollisions(TileEntity tile) { List<CollisionGroup> groups = new ArrayList<>(); if (tile instanceof TileNode && ((TileNode) tile).getNode() instanceof ICoverable) { CoverManager coverManager = ((ICoverable) ((TileNode) tile).getNode()).getCoverManager(); Cover coverNorth = coverManager.getCover(EnumFacing.NORTH); Cover coverEast = coverManager.getCover(EnumFacing.EAST); Cover coverSouth = coverManager.getCover(EnumFacing.SOUTH); Cover coverWest = coverManager.getCover(EnumFacing.WEST); Cover coverUp = coverManager.getCover(EnumFacing.UP); Cover coverDown = coverManager.getCover(EnumFacing.DOWN); if (coverNorth != null) { groups.add(new CollisionGroup().addItem(CollisionUtils.getBounds( coverWest != null ? 2 : 0, coverDown != null ? 2 : 0, 0, coverEast != null ? 14 : 16, coverUp != null ? 14 : 16, 2 ))); if (coverNorth.getType() != CoverType.HOLLOW) { groups.add(ConstantsCable.HOLDER_NORTH); } } if (coverEast != null) { groups.add(new CollisionGroup().addItem(CollisionUtils.getBounds( 14, coverDown != null ? 2 : 0, 0, 16, coverUp != null ? 14 : 16, 16 ))); if (coverEast.getType() != CoverType.HOLLOW) { groups.add(ConstantsCable.HOLDER_EAST); } } if (coverSouth != null) { groups.add(new CollisionGroup().addItem(CollisionUtils.getBounds( coverEast != null ? 14 : 16, coverDown != null ? 2 : 0, 16, coverWest != null ? 2 : 0, coverUp != null ? 14 : 16, 14 ))); if (coverSouth.getType() != CoverType.HOLLOW) { groups.add(ConstantsCable.HOLDER_SOUTH); } } if (coverWest != null) { groups.add(new CollisionGroup().addItem(CollisionUtils.getBounds( 0, coverDown != null ? 2 : 0, 0, 2, coverUp != null ? 
14 : 16, 16 ))); if (coverWest.getType() != CoverType.HOLLOW) { groups.add(ConstantsCable.HOLDER_WEST); } } if (coverUp != null) { groups.add(new CollisionGroup().addItem(CollisionUtils.getBounds( 0, 14, 0, 16, 16, 16 ))); if (coverUp.getType() != CoverType.HOLLOW) { groups.add(ConstantsCable.HOLDER_UP); } } if (coverDown != null) { groups.add(new CollisionGroup().addItem(CollisionUtils.getBounds( 0, 0, 0, 16, 2, 16 ))); if (coverDown.getType() != CoverType.HOLLOW) { groups.add(ConstantsCable.HOLDER_DOWN); } } } return groups; } @Override @SuppressWarnings("deprecation") public void addCollisionBoxToList(IBlockState state, World world, BlockPos pos, AxisAlignedBB entityBox, List<AxisAlignedBB> collidingBoxes, Entity entityIn, boolean isActualState) { for (CollisionGroup group : getCollisions(world.getTileEntity(pos), this.getActualState(state, world, pos))) { for (AxisAlignedBB aabb : group.getItems()) { addCollisionBoxToList(pos, entityBox, collidingBoxes, aabb); } } } @Override @SuppressWarnings("deprecation") public RayTraceResult collisionRayTrace(IBlockState state, World world, BlockPos pos, Vec3d start, Vec3d end) { AdvancedRayTraceResult result = AdvancedRayTracer.rayTrace(pos, start, end, getCollisions(world.getTileEntity(pos), this.getActualState(state, world, pos))); return result != null ? result.getHit() : null; } @Override @SuppressWarnings("deprecation") public boolean isOpaqueCube(IBlockState state) { return false; } @Override @SuppressWarnings("deprecation") public boolean isFullCube(IBlockState state) { return false; } @Override @SuppressWarnings("deprecation") public IBlockState getStateForPlacement(World world, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ, int meta, EntityLivingBase entity) { IBlockState state = super.getStateForPlacement(world, pos, facing, hitX, hitY, hitZ, meta, entity); if (getDirection() != null) { return state.withProperty(getDirection().getProperty(), getDirection().getFrom(facing, pos, entity)); } return state; } @Override public BlockRenderLayer getBlockLayer() { return BlockRenderLayer.CUTOUT; } @Override @SuppressWarnings("deprecation") public BlockFaceShape getBlockFaceShape(IBlockAccess worldIn, IBlockState state, BlockPos pos, EnumFacing face) { return BlockFaceShape.UNDEFINED; } }
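/*
 * Illustrative sketch (an assumption, not taken from the original sources): a BlockCable instance
 * is typically created once during mod initialization, registered with Forge, and has its baked
 * models hooked up on the client through registerModels(). The event handler shape and the
 * IModelRegistration instance below are assumptions for the sketch.
 */
// BlockCable cable = new BlockCable();
// event.getRegistry().register(cable);     // inside a RegistryEvent.Register<Block> handler
// cable.registerModels(modelRegistration); // client side only, see @SideOnly(Side.CLIENT)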
package com.telefonica.iot.cygnus.sinks; import com.google.gson.JsonPrimitive; import static org.junit.Assert.*; // this is required by "fail" like assertions import com.telefonica.iot.cygnus.aggregation.NGSIGenericAggregator; import com.telefonica.iot.cygnus.aggregation.NGSIGenericRowAggregator; import com.telefonica.iot.cygnus.containers.NotifyContextRequest; import com.telefonica.iot.cygnus.containers.NotifyContextRequest.ContextAttribute; import com.telefonica.iot.cygnus.containers.NotifyContextRequest.ContextElement; import com.telefonica.iot.cygnus.containers.NotifyContextRequest.ContextMetadata; import com.telefonica.iot.cygnus.errors.CygnusBadConfiguration; import com.telefonica.iot.cygnus.interceptors.NGSIEvent; import com.telefonica.iot.cygnus.utils.CommonConstants; import static com.telefonica.iot.cygnus.utils.CommonUtilsForTests.getTestTraceHead; import com.telefonica.iot.cygnus.backends.sql.SQLQueryUtils; import com.telefonica.iot.cygnus.utils.NGSIConstants; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import org.apache.flume.Context; import org.apache.log4j.Level; import org.apache.log4j.LogManager; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.runners.MockitoJUnitRunner; /** * * @author smartcities */ @RunWith(MockitoJUnitRunner.class) public class NGSIPostgisSinkTest { /** * Constructor. */ public NGSIPostgisSinkTest() { LogManager.getRootLogger().setLevel(Level.FATAL); } // NGSIPostgisSinkTest @Test public void testConfigureEnableEncoding() { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = "falso"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); try { assertTrue(sink.getInvalidConfiguration()); System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- OK - 'enable_encoding=falso' was detected"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- FAIL - 'enable_encoding=falso' was not detected"); throw e; } // try catch } // testConfigureEnableEncoding @Test public void testConfigureEnableLowercase() { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = null; // default String enableLowercase = "falso"; String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); try { assertTrue(sink.getInvalidConfiguration()); System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- OK - 'enable_lowercase=falso' was detected"); } catch 
(AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- FAIL - 'enable_lowercase=falso' was not detected"); throw e; } // try catch } // testConfigureEnableLowercase // TBD: check for dataModel values in NGSIPostgisSink and uncomment this test. // @Test public void testConfigureDataModel() { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-service"; String enableEncoding = null; // default String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); try { assertTrue(sink.getInvalidConfiguration()); System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- OK - 'data_model=dm-by-service' was detected"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- FAIL - 'data_model=dm-by-service' was not detected"); throw e; } // try catch } // testConfigureDataModel @Test public void testConfigureAttrPersistence() { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + " String attrPersistence = "fila"; String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = null; // default String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); try { assertTrue(sink.getInvalidConfiguration()); System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- OK - 'attr_persistence=fila' was detected"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- FAIL - 'attr_persistence=fila' was not detected"); throw e; } // try catch } // testConfigureAttrPersistence @Test public void testConfigureSQLOptionsIsNull() { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + " String attrPersistence = null; String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = null; // default String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); assertNull(sink.getPostgisOptions()); System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- OK - postgisOptions is null when it is not configured"); } // testConfigureSQLOptionsIsNull @Test public void 
testConfigureSQLOptionsHasValue() { System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + " String attrPersistence = null; String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = null; // default String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default String sqlOptions = "sslmode=require"; NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache, sqlOptions)); assertEquals(sqlOptions, sink.getPostgisOptions()); System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]") + "- OK - postgisOptions has value when it is configured"); } // testConfigureSQLOptionsIsNull @Test public void testBuildSchemaNameOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; String servicePath = "someServicePath"; try { String builtSchemaName = sink.buildSchemaName(service, servicePath); String expectedDBName = "someService"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]") + "- FAIL - There was some problem when building the schema name"); throw e; } // try catch } // testBuildDBNameOldEncoding @Test public void testBuildDBNameOldEncodingDatabaseDataModel() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingDatabaseDataModel]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-database"; // default String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; try { String builtSchemaName = sink.buildDBName(service); String expectedDBName = 
"someService"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingDatabaseDataModel]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingDatabaseDataModel]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingDatabaseDataModel]") + "- FAIL - There was some problem when building the Schema name"); throw e; } // try catch } // testBuildDBNameOldEncodingDatabaseDataModel @Test public void testBuildDBNameOldEncodingEntityTypeDatabaseDataModel() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-type-database"; // default String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; try { String builtSchemaName = sink.buildDBName(service); String expectedDBName = "someService"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]") + "- FAIL - There was some problem when building the Schema name"); throw e; } // try catch } // testBuildDBNameOldEncodingEntityTypeDatabaseDataModel @Test public void testBuildDBNameOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncoding]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; try { String builtSchemaName = sink.buildDBName(service); // The default vale for the DB name String expectedDBName = "postgres"; try { assertEquals(expectedDBName, builtSchemaName); 
System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncoding]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncoding]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameOldEncoding]") + "- FAIL - There was some problem when building the Schema name"); throw e; } // try catch } // testBuildDBNameOldEncoding @Test public void testBuildSchemaNameNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildSchemaNameNewEncoding]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; String servicePath = "someServicePath"; try { String builtSchemaName = sink.buildSchemaName(service, servicePath); String expectedDBName = "somex0053ervice"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildSchemaNameNewEncoding]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildSchemaNameNewEncoding]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildSchemaNameNewEncoding]") + "- FAIL - There was some problem when building the DB name"); throw e; } // try catch } // testBuildSchemaNameNewEncoding @Test public void testBuildDBNameNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncoding]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; try { String builtSchemaName = sink.buildDBName(service); String expectedDBName = "postgres"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncoding]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncoding]") + "- FAIL - '" + expectedDBName + "' is 
not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncoding]") + "- FAIL - There was some problem when building the DB name"); throw e; } // try catch } // testBuildDBNameNewEncoding @Test public void testBuildDBNameNewEncodingDatabaseDataModel() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingDatabaseDataModel]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-database-schema"; // default String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; try { String builtSchemaName = sink.buildDBName(service); String expectedDBName = "somex0053ervice"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingDatabaseDataModel]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingDatabaseDataModel]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingDatabaseDataModel]") + "- FAIL - There was some problem when building the DB name"); throw e; } // try catch } // testBuildDBNameNewEncodingDatabaseDataModel @Test public void testBuildDBNameNewEncodingEntityTypeDatabaseDataModel() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-type-database-schema"; // default String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "someService"; try { String builtSchemaName = sink.buildDBName(service); String expectedDBName = "somex0053ervice"; try { assertEquals(expectedDBName, builtSchemaName); System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]") + "- OK - '" + expectedDBName + "' is equals to the encoding of <service>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]") + "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>"); throw e; } // try catch } catch (Exception e) { 
System.out.println(getTestTraceHead("[NGSIPostgisSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]") + "- FAIL - There was some problem when building the DB name"); throw e; } // try catch } // testBuildDBNameNewEncodingEntityTypeDatabaseDataModel @Test public void testBuildTableNameNonRootServicePathDataModelByServicePathOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of <service-path>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-service-path"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = null; // irrelevant for this test String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "somePath"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByServicePathOldEncoding @Test public void testBuildTableNameNonRootServicePathDataModelByServicePathNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of <service-path>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-service-path"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = null; // irrelevant for this test String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "x002fsomex0050ath"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - 
'" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByServicePathNewEncoding @Test public void testBuildTableNameNonRootServicePathDataModelByEntityOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = "someId=someType"; String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "somePath_someId_someType"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> " + "and <entityType>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, " + "<entityId> and <entityType>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByEntityOldEncoding @Test public void testBuildTableNameNonRootServicePathDataModelByEntityNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = 
"/somePath"; String entity = "someId=someType"; String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "x002fsomex0050athxffffsomex0049dxffffsomex0054ype"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> " + "and <entityType>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, " + "<entityId> and <entityType>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByEntityNewEncoding @Test public void testBuildTableNameNonRootServicePathDataModelByEntityTypeOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-entity-type' the PostgreSQL table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-type"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = "someId=someType"; String entityType = "someType"; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "somePath_someType"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> " + "and <entityType>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, " + "<entityId> and <entityType>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByEntityTypeOldEncoding @Test public void testBuildTableNameNonRootServicePathDataModelByEntityTypeNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-entity-type' the PostgreSQL table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // 
default String batchTTL = null; // default String dataModel = "dm-by-entity-type"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = "someId=someType"; String entityType = "someType"; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "x002fsomex0050athxffffsomex0054ype"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> " + "and <entityType>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, " + "<entityId> and <entityType>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByEntityTypeNewEncoding @Test public void testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-fixed-entity-type' the PostgreSQL table name is the encoding of <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-fixed-entity-type"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = "someId=someType"; String entityType = "someType"; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "someType"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <entityType>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <entityType>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeOldEncoding @Test 
public void testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-fixed-entity-type' the PostgreSQL table name is the encoding of <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-fixed-entity-type"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/somePath"; String entity = "someId=someType"; String entityType = "someType"; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "somex0054ype"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <entityType>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <entityType>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeNewEncoding @Test public void testBuildTableNameRootServicePathDataModelByServicePathOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name cannot be built"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-service-path"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/"; String entity = null; // irrelevant for this test String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { sink.buildTableName(servicePath, entity, entityType, attribute); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - The root service path was not detected as not valid"); } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - The root service path was detected as not valid"); } // try catch } // testBuildTableNameRootServicePathDataModelByServicePathOldEncoding @Test public void testBuildTableNameRootServicePathDataModelByServicePathNewEncoding() throws Exception { 
System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of <service-path>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-service-path"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/"; String entity = null; // irrelevant for this test String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "x002f"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameRootServicePathDataModelByServicePathNewEncoding @Test public void testBuildTableNameRootServicePathDataModelByEntityOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/"; String entity = "someId=someType"; String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "someId_someType"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { 
System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameRootServicePathDataModelByEntityOldencoding @Test public void testBuildTableNameRootServicePathDataModelByEntityNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-service-path' the Postgis table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/"; String entity = "someId=someType"; String entityType = null; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "x002fxffffsomex0049dxffffsomex0054ype"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameRootServicePathDataModelByEntityNewEncoding @Test public void testBuildTableNameRootServicePathDataModelByEntityTypeOldEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-entity-type' the Postgis table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-type"; String enableEncoding = "false"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/"; String entity = "someId=someType"; String entityType = "someType"; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "someType"; try { assertEquals(expecetedTableName, builtTableName); 
System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameRootServicePathDataModelByEntityTypeOldEncoding @Test public void testBuildTableNameRootServicePathDataModelByEntityTypeNewEncoding() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + " + "'dm-by-entity-type' the Postgis table name is the encoding of the concatenation of <service-path>, " + "<entityId> and <entityType>"); String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = "dm-by-entity-type"; String enableEncoding = "true"; String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String servicePath = "/"; String entity = "someId=someType"; String entityType = "someType"; // irrelevant for this test String attribute = null; // irrelevant for this test try { String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute); String expecetedTableName = "x002fxffffsomex0054ype"; try { assertEquals(expecetedTableName, builtTableName); System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>"); } catch (AssertionError e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>"); throw e; } // try catch } catch (Exception e) { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]") + "- FAIL - There was some problem when building the table name"); throw e; } // try catch } // testBuildTableNameRootServicePathDataModelByEntityTypeNewEncoding @Test public void testBuildSchemaNameLength() throws Exception { System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]") + " String attrPersistence = null; // default String batchSize = null; // default String batchTime = null; // default String batchTTL = null; // default String dataModel = null; // default String enableEncoding = null; // default String enableLowercase = null; // default String host = null; // default String password = null; // default String port = null; // default String username = null; // default String cache = null; // default NGSIPostgisSink sink = new NGSIPostgisSink(); sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding, enableLowercase, host, password, port, username, cache)); String service = "tooLooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooongService"; String servicePath = "someServicePath"; try { sink.buildSchemaName(service, 
                    servicePath);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]")
                    + "- FAIL - A schema name length greater than 63 characters has not been detected");
            assertTrue(false);
        } catch (Exception e) {
            assertTrue(true);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildSchemaName]")
                    + "- OK - A schema name length greater than 63 characters has been detected");
        } // try catch
    } // testBuildSchemaNameLength

    @Test
    public void testBuildTableNameLengthDataModelByServicePath() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                + "-------- When data model is by service path, a table name length greater than 63 characters is "
                + "detected");
        String attrPersistence = null; // default
        String batchSize = null; // default
        String batchTime = null; // default
        String batchTTL = null; // default
        String dataModel = "dm-by-service-path";
        String enableEncoding = null; // default
        String enableLowercase = null; // default
        String host = null; // default
        String password = null; // default
        String port = null; // default
        String username = null; // default
        String cache = null; // default
        NGSIPostgisSink sink = new NGSIPostgisSink();
        sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
                enableLowercase, host, password, port, username, cache));
        String servicePath = "/tooLooooooooooooooooooooooooooooooooooooooooooooooooooooooongServicePath";
        String entity = null; // irrelevant for this test
        String entityType = null; // irrelevant for this test
        String attribute = null; // irrelevant for this test
        try {
            sink.buildTableName(servicePath, entity, entityType, attribute);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- FAIL - A table name length greater than 63 characters has not been detected");
            assertTrue(false);
        } catch (Exception e) {
            assertTrue(true);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- OK - A table name length greater than 63 characters has been detected");
        } // try catch
    } // testBuildTableNameLengthDataModelByServicePath

    @Test
    public void testBuildTableNameLengthDataModelByEntity() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                + "-------- When data model is by entity, a table name length greater than 63 characters is detected");
        String attrPersistence = null; // default
        String batchSize = null; // default
        String batchTime = null; // default
        String batchTTL = null; // default
        String dataModel = "dm-by-entity";
        String enableEncoding = null; // default
        String enableLowercase = null; // default
        String host = null; // default
        String password = null; // default
        String port = null; // default
        String username = null; // default
        String cache = null; // default
        NGSIPostgisSink sink = new NGSIPostgisSink();
        sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
                enableLowercase, host, password, port, username, cache));
        String servicePath = "/tooLooooooooooooooooooooongServicePath";
        String entity = "tooLoooooooooooooooooooooooooongEntity";
        String entityType = null; // irrelevant for this test
        String attribute = null; // irrelevant for this test
        try {
            sink.buildTableName(servicePath, entity, entityType, attribute);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- FAIL - A table name length greater than 63 characters has not been detected");
            assertTrue(false);
        } catch (Exception e) {
            assertTrue(true);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- OK - A table name length greater than 63 characters has been detected");
        } // try catch
    } // testBuildTableNameLengthDataModelByEntity

    @Test
    public void testBuildTableNameLengthDataModelByEntityType() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                + "-------- When data model is by entity type, a table name length greater than 63 characters is "
                + "detected");
        String attrPersistence = null; // default
        String batchSize = null; // default
        String batchTime = null; // default
        String batchTTL = null; // default
        String dataModel = "dm-by-entity-type";
        String enableEncoding = null; // default
        String enableLowercase = null; // default
        String host = null; // default
        String password = null; // default
        String port = null; // default
        String username = null; // default
        String cache = null; // default
        NGSIPostgisSink sink = new NGSIPostgisSink();
        sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
                enableLowercase, host, password, port, username, cache));
        String servicePath = "/tooLooooooooooooooooooooongServicePath";
        String entity = "tooLoooooooooooooooooooooooooongEntity";
        String entityType = "tooLoooooooooooooooooooooooooongEntityType"; // relevant for this data model
        String attribute = null; // irrelevant for this test
        try {
            sink.buildTableName(servicePath, entity, entityType, attribute);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- FAIL - A table name length greater than 63 characters has not been detected");
            assertTrue(false);
        } catch (Exception e) {
            assertTrue(true);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- OK - A table name length greater than 63 characters has been detected");
        } // try catch
    } // testBuildTableNameLengthDataModelByEntityType

    @Test
    public void testBuildTableNameLengthDataModelByAttribute() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                + "-------- When data model is by attribute, a table name length greater than 63 characters is "
                + "detected");
        String attrPersistence = null; // default
        String batchSize = null; // default
        String batchTime = null; // default
        String batchTTL = null; // default
        String dataModel = "dm-by-attribute";
        String enableEncoding = null; // default
        String enableLowercase = null; // default
        String host = null; // default
        String password = null; // default
        String port = null; // default
        String username = null; // default
        String cache = null; // default
        NGSIPostgisSink sink = new NGSIPostgisSink();
        sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
                enableLowercase, host, password, port, username, cache));
        String servicePath = "/tooLooooooooooooooongServicePath";
        String entity = "tooLooooooooooooooooooongEntity";
        String entityType = null; // irrelevant for this test
        String attribute = "tooLooooooooooooongAttribute";
        try {
            sink.buildTableName(servicePath, entity, entityType, attribute);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- FAIL - A table name length greater than 63 characters has not been detected");
            assertTrue(false);
        } catch (Exception e) {
            assertTrue(true);
            System.out.println(getTestTraceHead("[NGSIPostgisSink.buildTableName]")
                    + "- OK - A table name length greater than 63 characters has been detected");
        } // try catch
    } // testBuildTableNameLengthDataModelByAttribute

    @Test
    public void testConfigureCache() {
        System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]")
                + "-------- Configured 'backend.enable_cache' cannot be different than 'true' or 'false'");
        String attrPersistence = null; // default
        String batchSize = null; // default
        String batchTime = null; // default
        String batchTTL = null; // default
        String dataModel = null; // default
        String enableEncoding = null;
        String enableLowercase = null; // default
        String host = null; // default
        String password = null; // default
        String port = null; // default
        String username = null; // default
        String cache = "falso";
        NGSIPostgisSink sink = new NGSIPostgisSink();
        sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
                enableLowercase, host, password, port, username, cache));
        try {
            assertTrue(sink.getInvalidConfiguration());
            System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]")
                    + "- OK - 'enable_cache=falso' was detected");
        } catch (AssertionError e) {
            System.out.println(getTestTraceHead("[NGSIPostgisSink.configure]")
                    + "- FAIL - 'enable_cache=falso' was not detected");
            throw e;
        } // try catch
    } // testConfigureCache

    @Test
    public void testInitializeBuildTable() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgisSink.initialize]")
                + "-------- A table name is built when initializing the aggregator");
        String attrPersistence = null; // default
        String batchSize = null; // default
        String batchTime = null; // default
        String batchTTL = null; // default
        String dataModel = null; // default
        String enableEncoding = null;
        String enableLowercase = null; // default
        String host = null; // default
        String password = null; // default
        String port = null; // default
        String username = null; // default
        String cache = null; // default
        NGSIPostgisSink sink = new NGSIPostgisSink();
        sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
                enableLowercase, host, password, port, username, cache));
        // Create a PostgisAggregator
        NGSIGenericRowAggregator aggregator = new NGSIGenericRowAggregator();
        // Create an NGSIEvent
        String timestamp = "1461136795801";
        String correlatorId = "123456789";
        String transactionId = "123456789";
        String originalService = "someService";
        String originalServicePath = "somePath";
        String mappedService = "newService";
        String mappedServicePath = "newPath";
        Map<String, String> headers = new HashMap<>();
        headers.put(NGSIConstants.FLUME_HEADER_TIMESTAMP, timestamp);
        headers.put(CommonConstants.HEADER_CORRELATOR_ID, correlatorId);
        headers.put(NGSIConstants.FLUME_HEADER_TRANSACTION_ID, transactionId);
        headers.put(CommonConstants.HEADER_FIWARE_SERVICE, originalService);
        headers.put(CommonConstants.HEADER_FIWARE_SERVICE_PATH, originalServicePath);
        headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE, mappedService);
        headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE_PATH, mappedServicePath);
        ContextElement originalCE = createContextElement();
        NGSIEvent event = new NGSIEvent(headers, originalCE.toString().getBytes(), originalCE, null);
        aggregator.initialize(event);
        aggregator.setService(event.getServiceForNaming(false));
        aggregator.setServicePathForNaming(event.getServicePathForNaming(false));
        aggregator.setEntityForNaming(event.getEntityForNaming(false, false));
        aggregator.setEntityType(event.getEntityTypeForNaming(false));
        aggregator.setAttribute(event.getAttributeForNaming(false));
        aggregator.setTableName(sink.buildTableName(aggregator.getServicePathForNaming(),
                aggregator.getEntityForNaming(), aggregator.getEntityType(), aggregator.getAttribute()));
        try {
            assertTrue(aggregator.getTableName(false) != null);
            System.out.println(getTestTraceHead("[PostgisAggregator.initialize]")
                    + "- OK - A table name has been created");
        } catch (AssertionError e) {
            System.out.println(getTestTraceHead("[PostgisAggregator.initialize]")
                    + "- FAIL - A table name has not been created");
            throw e;
        } // try catch
    } // testInitializeBuildTable

    private Context createContext(String attrPersistence, String batchSize, String batchTime, String batchTTL,
            String dataModel, String enableEncoding, String
enableLowercase, String host, String password, String port, String username, String cache) { Context context = new Context(); context.put("attr_persistence", attrPersistence); context.put("batch_size", batchSize); context.put("batch_time", batchTime); context.put("batch_ttl", batchTTL); context.put("data_model", dataModel); context.put("enable_encoding", enableEncoding); context.put("enable_lowercase", enableLowercase); context.put("postgis_host", host); context.put("postgis_password", password); context.put("postgis_port", port); context.put("postgis_username", username); context.put("backend.enable_cache", cache); return context; } // createContext private Context createContext(String attrPersistence, String batchSize, String batchTime, String batchTTL, String dataModel, String enableEncoding, String enableLowercase, String host, String password, String port, String username, String cache, String sqlOptions) { Context context = new Context(); context.put("attr_persistence", attrPersistence); context.put("batch_size", batchSize); context.put("batch_time", batchTime); context.put("batch_ttl", batchTTL); context.put("data_model", dataModel); context.put("enable_encoding", enableEncoding); context.put("enable_lowercase", enableLowercase); context.put("postgis_host", host); context.put("postgis_password", password); context.put("postgis_port", port); context.put("postgis_username", username); context.put("backend.enable_cache", cache); context.put("postgis_options", sqlOptions); return context; } // createContext private Context createContextforNativeTypes(String attrPersistence, String batchSize, String batchTime, String batchTTL, String dataModel, String enableEncoding, String enableLowercase, String host, String password, String port, String username, String cache, String attrNativeTypes) { Context context = new Context(); context.put("attr_persistence", attrPersistence); context.put("batch_size", batchSize); context.put("batch_time", batchTime); context.put("batch_ttl", batchTTL); context.put("data_model", dataModel); context.put("enable_encoding", enableEncoding); context.put("enable_lowercase", enableLowercase); context.put("postgis_host", host); context.put("postgis_password", password); context.put("postgis_port", port); context.put("postgis_username", username); context.put("backend.enable_cache", cache); context.put("attr_native_types", attrNativeTypes); return context; } // createContext private ContextElement createContextElement() { NotifyContextRequest notifyContextRequest = new NotifyContextRequest(); ContextMetadata contextMetadata = new ContextMetadata(); contextMetadata.setName("location"); contextMetadata.setType("string"); contextMetadata.setContextMetadata(new JsonPrimitive("WGS84")); ArrayList<ContextMetadata> metadata = new ArrayList<>(); metadata.add(contextMetadata); ContextAttribute contextAttribute2 = new ContextAttribute(); contextAttribute2.setName("someName2"); contextAttribute2.setType("someType2"); contextAttribute2.setContextValue(new JsonPrimitive("someValue2")); contextAttribute2.setContextMetadata(null); ContextAttribute contextAttribute1 = new ContextAttribute(); contextAttribute1.setName("someName1"); contextAttribute1.setType("geooint"); contextAttribute1.setContextValue(new JsonPrimitive("-3.7167, 40.3833")); contextAttribute1.setContextMetadata(metadata); ArrayList<ContextAttribute> attributes = new ArrayList<>(); attributes.add(contextAttribute2); attributes.add(contextAttribute1); ContextElement contextElement = new ContextElement(); 
contextElement.setId("someId2"); contextElement.setType("someType"); contextElement.setIsPattern("false"); contextElement.setAttributes(attributes); return contextElement; } // createContextElement private ContextElement createMappedContextElement() { NotifyContextRequest notifyContextRequest = new NotifyContextRequest(); ContextMetadata contextMetadata = new ContextMetadata(); contextMetadata.setName("location"); contextMetadata.setType("string"); contextMetadata.setContextMetadata(new JsonPrimitive("WGS84")); ArrayList<ContextMetadata> metadata = new ArrayList<>(); metadata.add(contextMetadata); ContextAttribute contextAttribute1 = new ContextAttribute(); contextAttribute1.setName("someName1"); contextAttribute1.setType("geo:point"); contextAttribute1.setContextValue(new JsonPrimitive("-3.7167, 40.3833")); contextAttribute1.setContextMetadata(metadata); ContextAttribute contextAttribute2 = new ContextAttribute(); contextAttribute2.setName("someName2"); contextAttribute2.setType("someType2"); contextAttribute2.setContextValue(new JsonPrimitive("someValue2")); contextAttribute2.setContextMetadata(null); ArrayList<ContextAttribute> attributes = new ArrayList<>(); attributes.add(contextAttribute1); attributes.add(contextAttribute2); ContextElement contextElement = new ContextElement(); contextElement.setId("someId2"); contextElement.setType("someType"); contextElement.setIsPattern("false"); contextElement.setAttributes(attributes); return contextElement; } // createContextElement private ContextElement createMappedContextElement2() { NotifyContextRequest notifyContextRequest = new NotifyContextRequest(); ContextMetadata contextMetadata = new ContextMetadata(); contextMetadata.setName("location"); contextMetadata.setType("string"); contextMetadata.setContextMetadata(new JsonPrimitive("NewWGS84")); ArrayList<ContextMetadata> metadata = new ArrayList<>(); metadata.add(contextMetadata); ContextAttribute contextAttribute1 = new ContextAttribute(); contextAttribute1.setName("someName1"); contextAttribute1.setType("geo:point"); contextAttribute1.setContextValue(new JsonPrimitive("-3.7167, 40.3833")); contextAttribute1.setContextMetadata(metadata); ContextAttribute contextAttribute2 = new ContextAttribute(); contextAttribute2.setName("someName2"); contextAttribute2.setType("someType2"); contextAttribute2.setContextValue(new JsonPrimitive("someValue2New")); contextAttribute2.setContextMetadata(null); ArrayList<ContextAttribute> attributes = new ArrayList<>(); attributes.add(contextAttribute1); attributes.add(contextAttribute2); ContextElement contextElement = new ContextElement(); contextElement.setId("someId2"); contextElement.setType("someType"); contextElement.setIsPattern("false"); contextElement.setAttributes(attributes); return contextElement; } // createContextElement private ContextElement createContextElementForNativeTypes() { NotifyContextRequest notifyContextRequest = new NotifyContextRequest(); ContextMetadata contextMetadata = new ContextMetadata(); contextMetadata.setName("someString"); contextMetadata.setType("string"); ArrayList<ContextMetadata> metadata = new ArrayList<>(); metadata.add(contextMetadata); ContextAttribute contextAttribute1 = new ContextAttribute(); contextAttribute1.setName("someNumber"); contextAttribute1.setType("number"); contextAttribute1.setContextValue(new JsonPrimitive(2)); contextAttribute1.setContextMetadata(null); ContextAttribute contextAttribute2 = new ContextAttribute(); contextAttribute2.setName("somneBoolean"); contextAttribute2.setType("Boolean"); 
contextAttribute2.setContextValue(new JsonPrimitive(true)); contextAttribute2.setContextMetadata(null); ContextAttribute contextAttribute3 = new ContextAttribute(); contextAttribute3.setName("someDate"); contextAttribute3.setType("DateTime"); contextAttribute3.setContextValue(new JsonPrimitive("2016-09-21T01:23:00.00Z")); contextAttribute3.setContextMetadata(null); ContextAttribute contextAttribute4 = new ContextAttribute(); contextAttribute4.setName("someGeoJson"); contextAttribute4.setType("geo:json"); contextAttribute4.setContextValue(new JsonPrimitive("{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}")); contextAttribute4.setContextMetadata(null); ContextAttribute contextAttribute5 = new ContextAttribute(); contextAttribute5.setName("someJson"); contextAttribute5.setType("json"); contextAttribute5.setContextValue(new JsonPrimitive("{\"String\": \"string\"}")); contextAttribute5.setContextMetadata(null); ContextAttribute contextAttribute6 = new ContextAttribute(); contextAttribute6.setName("someString"); contextAttribute6.setType("string"); contextAttribute6.setContextValue(new JsonPrimitive("foo")); contextAttribute6.setContextMetadata(null); ContextAttribute contextAttribute7 = new ContextAttribute(); contextAttribute7.setName("someString2"); contextAttribute7.setType("string"); contextAttribute7.setContextValue(new JsonPrimitive("")); contextAttribute7.setContextMetadata(null); ArrayList<ContextAttribute> attributes = new ArrayList<>(); attributes.add(contextAttribute1); attributes.add(contextAttribute2); attributes.add(contextAttribute3); attributes.add(contextAttribute4); attributes.add(contextAttribute5); attributes.add(contextAttribute6); attributes.add(contextAttribute7); ContextElement contextElement = new ContextElement(); contextElement.setId("someId"); contextElement.setType("someType"); contextElement.setIsPattern("false"); contextElement.setAttributes(attributes); return contextElement; } // createContextElementForNativeTypes public NGSIBatch setUpBatch() { String timestamp = "1461136795801"; String correlatorId = "123456789"; String transactionId = "123456789"; String originalService = "someService"; String originalServicePath = "somePath"; String mappedService = "newService"; String mappedServicePath = "newPath"; String destination = "someDestination"; Map<String, String> headers = new HashMap<>(); headers.put(NGSIConstants.FLUME_HEADER_TIMESTAMP, timestamp); headers.put(CommonConstants.HEADER_CORRELATOR_ID, correlatorId); headers.put(NGSIConstants.FLUME_HEADER_TRANSACTION_ID, transactionId); headers.put(CommonConstants.HEADER_FIWARE_SERVICE, originalService); headers.put(CommonConstants.HEADER_FIWARE_SERVICE_PATH, originalServicePath); headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE, mappedService); headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE_PATH, mappedServicePath); String timestamp2 = "1461136795800"; String correlatorId2 = "123456789"; String transactionId2 = "123456789"; String originalService2 = "someService"; String originalServicePath2 = "somePath"; String mappedService2 = "newService"; String mappedServicePath2 = "newPath"; String destination2 = "someDestination"; Map<String, String> headers2 = new HashMap<>(); headers2.put(NGSIConstants.FLUME_HEADER_TIMESTAMP, timestamp2); headers2.put(CommonConstants.HEADER_CORRELATOR_ID, correlatorId2); headers2.put(NGSIConstants.FLUME_HEADER_TRANSACTION_ID, transactionId2); headers2.put(CommonConstants.HEADER_FIWARE_SERVICE, originalService2); 
headers2.put(CommonConstants.HEADER_FIWARE_SERVICE_PATH, originalServicePath2); headers2.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE, mappedService2); headers2.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE_PATH, mappedServicePath2); NotifyContextRequest.ContextElement contextElement = createContextElementForNativeTypes(); NotifyContextRequest.ContextElement contextElement2 = createContextElement(); NGSIEvent ngsiEvent = new NGSIEvent(headers, contextElement.toString().getBytes(), contextElement, null); NGSIEvent ngsiEvent2 = new NGSIEvent(headers2, contextElement2.toString().getBytes(), contextElement2, createMappedContextElement()); NGSIBatch batch = new NGSIBatch(); batch.addEvent(destination, ngsiEvent); batch.addEvent(destination2, ngsiEvent2); return batch; } public NGSIBatch setUpBatchOverlappingEvents() { String timestamp = "1461136795802"; String correlatorId = "123456789"; String transactionId = "123456789"; String originalService = "someService"; String originalServicePath = "somePath"; String mappedService = "newService"; String mappedServicePath = "newPath"; String destination = "someDestination"; Map<String, String> headers = new HashMap<>(); headers.put(NGSIConstants.FLUME_HEADER_TIMESTAMP, timestamp); headers.put(CommonConstants.HEADER_CORRELATOR_ID, correlatorId); headers.put(NGSIConstants.FLUME_HEADER_TRANSACTION_ID, transactionId); headers.put(CommonConstants.HEADER_FIWARE_SERVICE, originalService); headers.put(CommonConstants.HEADER_FIWARE_SERVICE_PATH, originalServicePath); headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE, mappedService); headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE_PATH, mappedServicePath); NotifyContextRequest.ContextElement contextElement = createMappedContextElement2(); NGSIEvent ngsiEvent = new NGSIEvent(headers, contextElement.toString().getBytes(), contextElement, null); NGSIBatch batch = setUpBatch(); batch.addEvent(destination, ngsiEvent); return batch; } @Test public void testNativeTypeColumnBatch() throws CygnusBadConfiguration{ String attr_native_types = "true"; NGSIBatch batch = setUpBatch(); // 2 events (1 on someId, 1 on someId2) String destination = "someDestination"; NGSIPostgisSink ngsiPostgisSink = new NGSIPostgisSink(); ngsiPostgisSink.configure(createContextforNativeTypes("column", null, null, null, null, null, null, null, null, null, null, null, attr_native_types)); try { batch.startIterator(); NGSIGenericAggregator aggregator = ngsiPostgisSink.getAggregator(false); while (batch.hasNext()) { destination = batch.getNextDestination(); ArrayList<NGSIEvent> events = batch.getNextEvents(); aggregator.setService(events.get(0).getServiceForNaming(false)); aggregator.setServicePathForData(events.get(0).getServicePathForData()); aggregator.setServicePathForNaming(events.get(0).getServicePathForNaming(false)); aggregator.setEntityForNaming(events.get(0).getEntityForNaming(false, false)); aggregator.setEntityType(events.get(0).getEntityTypeForNaming(false)); aggregator.setAttribute(events.get(0).getAttributeForNaming(false)); aggregator.setDbName(ngsiPostgisSink.buildSchemaName(aggregator.getService(), aggregator.getServicePathForNaming())); aggregator.setTableName(ngsiPostgisSink.buildTableName(aggregator.getServicePathForNaming(), aggregator.getEntityForNaming(), aggregator.getEntityType(), aggregator.getAttribute())); aggregator.setAttrNativeTypes(true); aggregator.setEnableGeoParse(true); aggregator.setAttrMetadataStore(true); aggregator.setEnableNameMappings(true); aggregator.setLastDataMode("insert"); 
aggregator.initialize(events.get(0)); for (NGSIEvent event : events) { aggregator.aggregate(event); } } // 2 rows (one per event in the batch) String correctBatch = "('someId','someType','somePath','2016-04-20 07:19:55.801',2,'[]',TRUE,'[]','2016-09-21T01:23:00.00Z','[]',ST_GeomFromGeoJSON('\"{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}\"'),'[]','{\"String\": \"string\"}','[]','foo','[]','','[]',NULL,NULL,NULL,NULL),('someId2','someType','somePath','2016-04-20 07:19:55.800',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,ST_SetSRID(ST_MakePoint(-3.7167::double precision , 40.3833::double precision ), 4326),'[{\"name\":\"location\",\"type\":\"string\",\"value\":\"WGS84\"}]','someValue2','[]')"; String valuesForInsert = SQLQueryUtils.getValuesForInsert(aggregator.getAggregationToPersist(), aggregator.isAttrNativeTypes()); if (valuesForInsert.equals(correctBatch)) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testNativeTypesColumnBatch]") + "- OK - NativeTypesOK"); assertTrue(true); } else { assertFalse(true); } } catch (Exception e) { System.out.println(e); assertFalse(true); } } @Test public void testNativeTypeColumnBatchLastData() throws CygnusBadConfiguration{ String attr_native_types = "true"; NGSIBatch batch = setUpBatchOverlappingEvents(); // 3 events (1 on someId, 1 in someId2, 1 in someId2 with new values) String destination = "someDestination"; NGSIPostgisSink ngsiPostgisSink = new NGSIPostgisSink(); ngsiPostgisSink.configure(createContextforNativeTypes("column", null, null, null, null, null, null, null, null, null, null, null, attr_native_types)); try { batch.startIterator(); NGSIGenericAggregator aggregator = ngsiPostgisSink.getAggregator(false); while (batch.hasNext()) { destination = batch.getNextDestination(); ArrayList<NGSIEvent> events = batch.getNextEvents(); aggregator.setService(events.get(0).getServiceForNaming(false)); aggregator.setServicePathForData(events.get(0).getServicePathForData()); aggregator.setServicePathForNaming(events.get(0).getServicePathForNaming(false)); aggregator.setEntityForNaming(events.get(0).getEntityForNaming(false, false)); aggregator.setEntityType(events.get(0).getEntityTypeForNaming(false)); aggregator.setAttribute(events.get(0).getAttributeForNaming(false)); aggregator.setDbName(ngsiPostgisSink.buildSchemaName(aggregator.getService(), aggregator.getServicePathForNaming())); aggregator.setTableName(ngsiPostgisSink.buildTableName(aggregator.getServicePathForNaming(), aggregator.getEntityForNaming(), aggregator.getEntityType(), aggregator.getAttribute())); aggregator.setAttrNativeTypes(true); aggregator.setEnableGeoParse(true); aggregator.setAttrMetadataStore(true); aggregator.setEnableNameMappings(true); aggregator.setLastDataTimestampKey(NGSIConstants.RECV_TIME); aggregator.setLastDataUniqueKey("entityid"); aggregator.setLastDataMode("upsert"); aggregator.initialize(events.get(0)); for (NGSIEvent event : events) { aggregator.aggregate(event); } } // This tests is testing a weird situation: entityid is defined as key, but it is not included // in the notification. In that case the query uses NULL for that attribute position (at the beginning // of the query string). This will fail when the query hits the DB (as keys cannot be NULL) but this tests ensures // Cygnus is doing its job. 
This is a consequence of the changes done in PR #2199 // in the initialize() method of the NGSIGenericColumnAggregator class // 2 rows (the events on someId2 are aggregated in the same rows) String correctBatch = "(NULL,'someId','someType','somePath','2016-04-20 07:19:55.801',2,'[]',TRUE,'[]','2016-09-21T01:23:00.00Z','[]',ST_GeomFromGeoJSON('\"{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}\"'),'[]','{\"String\": \"string\"}','[]','foo','[]','','[]',NULL,NULL,NULL,NULL),(NULL,'someId2','someType','somePath','2016-04-20 07:19:55.802',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,ST_SetSRID(ST_MakePoint(-3.7167::double precision , 40.3833::double precision ), 4326),'[{\"name\":\"location\",\"type\":\"string\",\"value\":\"NewWGS84\"}]','someValue2New','[]')"; String valuesForInsert = SQLQueryUtils.getValuesForInsert(aggregator.getLastDataToPersist(), aggregator.isAttrNativeTypes()); if (valuesForInsert.equals(correctBatch)) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testNativeTypeColumnBatchLastData]") + "- OK - NativeTypesOK"); assertTrue(true); } else { assertFalse(true); } } catch (Exception e) { System.out.println(e); assertFalse(true); } } @Test public void testNativeTypeColumnBatchLastDataKeyOtherThanEntityId() throws CygnusBadConfiguration{ // Same as testNativeTypeColumnBatchLastData() but with aggregator.setLastDataUniqueKey("someString"); String attr_native_types = "true"; NGSIBatch batch = setUpBatch(); // 2 events (1 on someId, 1 on someId2) String destination = "someDestination"; NGSIPostgisSink ngsiPostgisSink = new NGSIPostgisSink(); ngsiPostgisSink.configure(createContextforNativeTypes("column", null, null, null, null, null, null, null, null, null, null, null, attr_native_types)); try { batch.startIterator(); NGSIGenericAggregator aggregator = ngsiPostgisSink.getAggregator(false); while (batch.hasNext()) { destination = batch.getNextDestination(); ArrayList<NGSIEvent> events = batch.getNextEvents(); aggregator.setService(events.get(0).getServiceForNaming(false)); aggregator.setServicePathForData(events.get(0).getServicePathForData()); aggregator.setServicePathForNaming(events.get(0).getServicePathForNaming(false)); aggregator.setEntityForNaming(events.get(0).getEntityForNaming(false, false)); aggregator.setEntityType(events.get(0).getEntityTypeForNaming(false)); aggregator.setAttribute(events.get(0).getAttributeForNaming(false)); aggregator.setDbName(ngsiPostgisSink.buildSchemaName(aggregator.getService(), aggregator.getServicePathForNaming())); aggregator.setTableName(ngsiPostgisSink.buildTableName(aggregator.getServicePathForNaming(), aggregator.getEntityForNaming(), aggregator.getEntityType(), aggregator.getAttribute())); aggregator.setAttrNativeTypes(true); aggregator.setEnableGeoParse(true); aggregator.setAttrMetadataStore(true); aggregator.setEnableNameMappings(true); aggregator.setLastDataTimestampKey(NGSIConstants.RECV_TIME); aggregator.setLastDataUniqueKey("someString"); aggregator.setLastDataMode("upsert"); aggregator.initialize(events.get(0)); for (NGSIEvent event : events) { aggregator.aggregate(event); } } // 2 rows (one per event in the batch) String correctBatch = "('foo','someId','someType','somePath','2016-04-20 07:19:55.801',2,'[]',TRUE,'[]','2016-09-21T01:23:00.00Z','[]',ST_GeomFromGeoJSON('\"{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}\"'),'[]','{\"String\": \"string\"}','[]','[]','','[]',NULL,NULL,NULL,NULL),(NULL,'someId2','someType','somePath','2016-04-20 
07:19:55.800',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,ST_SetSRID(ST_MakePoint(-3.7167::double precision , 40.3833::double precision ), 4326),'[{\"name\":\"location\",\"type\":\"string\",\"value\":\"WGS84\"}]','someValue2','[]')"; String valuesForInsert = SQLQueryUtils.getValuesForInsert(aggregator.getLastDataToPersist(), aggregator.isAttrNativeTypes()); if (valuesForInsert.equals(correctBatch)) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testNativeTypeColumnBatchLastDataKeyOtherThanEntityId]") + "- OK - NativeTypesOK"); assertTrue(true); } else { assertFalse(true); } } catch (Exception e) { System.out.println(e); assertFalse(true); } } @Test public void testNativeTypeColumnBatchLastDataNoKey() throws CygnusBadConfiguration{ // Same as testNativeTypeColumnBatchLastData() but without "aggregator.setLastDataUniqueKey("entityid");" String attr_native_types = "true"; NGSIBatch batch = setUpBatch(); // 2 events (1 on someId, 1 on someId2) String destination = "someDestination"; NGSIPostgisSink ngsiPostgisSink = new NGSIPostgisSink(); ngsiPostgisSink.configure(createContextforNativeTypes("column", null, null, null, null, null, null, null, null, null, null, null, attr_native_types)); try { batch.startIterator(); NGSIGenericAggregator aggregator = ngsiPostgisSink.getAggregator(false); while (batch.hasNext()) { destination = batch.getNextDestination(); ArrayList<NGSIEvent> events = batch.getNextEvents(); aggregator.setService(events.get(0).getServiceForNaming(false)); aggregator.setServicePathForData(events.get(0).getServicePathForData()); aggregator.setServicePathForNaming(events.get(0).getServicePathForNaming(false)); aggregator.setEntityForNaming(events.get(0).getEntityForNaming(false, false)); aggregator.setEntityType(events.get(0).getEntityTypeForNaming(false)); aggregator.setAttribute(events.get(0).getAttributeForNaming(false)); aggregator.setDbName(ngsiPostgisSink.buildSchemaName(aggregator.getService(), aggregator.getServicePathForNaming())); aggregator.setTableName(ngsiPostgisSink.buildTableName(aggregator.getServicePathForNaming(), aggregator.getEntityForNaming(), aggregator.getEntityType(), aggregator.getAttribute())); aggregator.setAttrNativeTypes(true); aggregator.setEnableGeoParse(true); aggregator.setAttrMetadataStore(true); aggregator.setEnableNameMappings(true); aggregator.setLastDataTimestampKey(NGSIConstants.RECV_TIME); aggregator.setLastDataMode("upsert"); aggregator.initialize(events.get(0)); for (NGSIEvent event : events) { aggregator.aggregate(event); } } // 2 rows (one per event in the batch) String correctBatch = "('someId','someType','somePath','2016-04-20 07:19:55.801',2,'[]',TRUE,'[]','2016-09-21T01:23:00.00Z','[]',ST_GeomFromGeoJSON('\"{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}\"'),'[]','{\"String\": \"string\"}','[]','foo','[]','','[]',NULL,NULL,NULL,NULL),('someId2','someType','somePath','2016-04-20 07:19:55.800',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,ST_SetSRID(ST_MakePoint(-3.7167::double precision , 40.3833::double precision ), 4326),'[{\"name\":\"location\",\"type\":\"string\",\"value\":\"WGS84\"}]','someValue2','[]')"; String valuesForInsert = SQLQueryUtils.getValuesForInsert(aggregator.getLastDataToPersist(), aggregator.isAttrNativeTypes()); if (valuesForInsert.equals(correctBatch)) { System.out.println(getTestTraceHead("[NGSIPostgisSink.testNativeTypeColumnBatchLastDataNoKey]") + "- OK - NativeTypesOK"); assertTrue(true); } else { assertFalse(true); } } catch (Exception e) { 
            System.out.println(e);
            assertFalse(true);
        }
    }

} // NGSIPostgisSinkTest
package org.modmine.web;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.log4j.Logger;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.tiles.ComponentContext;
import org.apache.struts.tiles.actions.TilesAction;
import org.intermine.api.InterMineAPI;
import org.intermine.api.profile.Profile;
import org.intermine.api.query.WebResultsExecutor;
import org.intermine.api.results.WebResults;
import org.intermine.model.InterMineObject;
import org.intermine.model.bio.Protocol;
import org.intermine.model.bio.ResultFile;
import org.intermine.model.bio.Submission;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryField;
import org.intermine.objectstore.query.QueryValue;
import org.intermine.objectstore.query.Results;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.pathquery.Constraints;
import org.intermine.pathquery.OrderDirection;
import org.intermine.pathquery.OuterJoinStatus;
import org.intermine.pathquery.PathQuery;
import org.intermine.web.logic.results.PagedTable;
import org.intermine.web.logic.session.SessionMethods;

/**
 * Controller for submissionProtocolsDisplayer.jsp
 * @author Richard Smith
 */
public class SubmissionProtocolsController extends TilesAction {

    protected static final Logger LOG = Logger.getLogger(SubmissionProtocolsController.class);

    /**
     * {@inheritDoc}
     */
    public ActionForward execute(@SuppressWarnings("unused") ComponentContext context,
            @SuppressWarnings("unused") ActionMapping mapping,
            @SuppressWarnings("unused") ActionForm form,
            HttpServletRequest request,
            @SuppressWarnings("unused") HttpServletResponse response)
            throws Exception {
        HttpSession session = request.getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        ObjectStore os = im.getObjectStore();

        // submission object
        InterMineObject o = (InterMineObject) request.getAttribute("object");
        LOG.info("SUBMISSION id: " + o.getId());

        // create the query
        PathQuery q = new PathQuery(os.getModel());
        q.addView("Submission.appliedProtocols.step");
        q.addView("Submission.appliedProtocols.inputs.type");
        q.addView("Submission.appliedProtocols.inputs.name");
        q.addView("Submission.appliedProtocols.inputs.value");
        q.addView("Submission.appliedProtocols.protocol.name");
        q.addView("Submission.appliedProtocols.outputs.type");
        q.addView("Submission.appliedProtocols.outputs.name");
        q.addView("Submission.appliedProtocols.outputs.value");
        q.addConstraint(Constraints.eq("Submission.id", o.getId().toString()));

        // rm the outer join for i/o: check if ok.
        // if not add
        q.setOuterJoinStatus("Submission.appliedProtocols.inputs", OuterJoinStatus.OUTER);
        q.setOuterJoinStatus("Submission.appliedProtocols.outputs", OuterJoinStatus.OUTER);
        q.addOrderBy("Submission.appliedProtocols.step", OrderDirection.ASC);

        Profile profile = SessionMethods.getProfile(session);
        WebResultsExecutor executor = im.getWebResultsExecutor(profile);
        WebResults results = executor.execute(q);

        if (results.size() > 2000) {
            request.setAttribute("subId", o.getId());
            return null;
        }

        PagedTable pagedTable = new PagedTable(results);
        // NB: you need to set a maximum, default is 10!
        pagedTable.setPageSize(2000);
        request.setAttribute("pagedResults", pagedTable);

        // let's also get the dccId (needed for the external link)
        // maybe it can be obtained in a simpler way
        Query q1 = new Query();
        QueryClass qc = new QueryClass(Submission.class);
        QueryField qcId = new QueryField(qc, "id");
        q1.addFrom(qc);
        q1.addToSelect(qc);
        SimpleConstraint sc = new SimpleConstraint(qcId, ConstraintOp.EQUALS, new QueryValue(o.getId()));
        q1.setConstraint(sc);
        Results result = os.executeSingleton(q1);

        // and dccId, protocols
        String dccId = null;
        Set<ResultFile> rf = new HashSet<ResultFile>();
        Set<Protocol> pt = new HashSet<Protocol>();
        Iterator i = result.iterator();

        while (i.hasNext()) {
            Submission sub = (Submission) i.next();
            dccId = sub.getdCCid();
            pt = sub.getProtocols();
            rf = sub.getResultFiles();
        }

        request.setAttribute("DCCid", dccId);
        request.setAttribute("protocols", pt);
        request.setAttribute("files", rf);
        return null;
    }
}
package org.objectweb.proactive.core.group; import java.lang.reflect.InvocationTargetException; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.ListIterator; import java.util.Map; import java.util.Set; import java.util.Vector; import org.apache.log4j.Logger; import org.objectweb.proactive.Body; import org.objectweb.proactive.ProActive; import org.objectweb.proactive.core.UniqueID; import org.objectweb.proactive.core.body.LocalBodyStore; import org.objectweb.proactive.core.body.proxy.AbstractProxy; import org.objectweb.proactive.core.component.ProActiveInterface; import org.objectweb.proactive.core.group.spmd.MethodCallSetSPMDGroup; import org.objectweb.proactive.core.group.threadpool.ThreadPool; import org.objectweb.proactive.core.mop.ConstructionOfReifiedObjectFailedException; import org.objectweb.proactive.core.mop.ConstructorCall; import org.objectweb.proactive.core.mop.MOP; import org.objectweb.proactive.core.mop.MethodCall; import org.objectweb.proactive.core.mop.StubObject; public class ProxyForGroup extends AbstractProxy implements org.objectweb.proactive.core.mop.Proxy, Group, java.io.Serializable { /** The logger for the Class */ protected static Logger logger = Logger.getLogger(ProxyForGroup.class.getName()); /** The name of the Class : all members of the group are "className" assignable */ protected String className; /** The list of member : it contains exclusively, StubObjects connected to Proxies, or Java Objects */ protected Vector memberList; /** The map : to name members of the group*/ protected Map elementNames; /** Unique identificator for body (avoid infinite loop in some hierarchicals groups) */ // NOT FULLY IMPLEMENTED !!! transient private UniqueID proxyForGroupID; /** Number of awaited methodcall on the group's member. The Semantic is : we wait all call are done before continuing */ protected int waited = 0; /** Flag to deternime the semantic of communication (broadcast or dispatching) */ protected boolean dispatching = false; /** Flag to deternime the semantic of communication (unique serialization of parameters or not) */ protected boolean uniqueSerialization = false; /** The stub of the typed group */ protected StubObject stub; /** A pool of thread to serve the request */ transient private ThreadPool threadpool; public ProxyForGroup(String nameOfClass) throws ConstructionOfReifiedObjectFailedException { this.className = nameOfClass; this.memberList = new Vector(); this.proxyForGroupID = new UniqueID(); this.threadpool = new ThreadPool(); this.elementNames = new HashMap(); } public ProxyForGroup(String nameOfClass, Integer size) throws ConstructionOfReifiedObjectFailedException { this.className = nameOfClass; this.memberList = new Vector(size.intValue()); this.proxyForGroupID = new UniqueID(); this.threadpool = new ThreadPool(); this.elementNames = new HashMap(); } public ProxyForGroup() throws ConstructionOfReifiedObjectFailedException { this.memberList = new Vector(); this.proxyForGroupID = new UniqueID(); this.threadpool = new ThreadPool(); this.elementNames = new HashMap(); } public ProxyForGroup(ConstructorCall c, Object[] p) throws ConstructionOfReifiedObjectFailedException { this.memberList = new Vector(); this.proxyForGroupID = new UniqueID(); this.threadpool = new ThreadPool(); this.elementNames = new HashMap(); } /** * Allows the Group to dispatch parameters. */ protected void setDispatchingOn() { this.dispatching = true; } /** * Allows the Group to broadcast parameters. 
*/ protected void setDispatchingOff() { this.dispatching = false; } /** * Allows the Group to make an unique serialization of parameters. */ protected void setUniqueSerializationOn() { this.uniqueSerialization = true; } /** * Removes the ability of the Group to make an unique serialization of parameters.. */ protected void setUniqueSerializationOff() { this.uniqueSerialization = false; } /** * Checks the semantic of communication of the Group. * @return <code>true</code> if the "scatter option" is enabled. */ protected boolean isDispatchingOn() { return this.dispatching; } private boolean isDispatchingCall(MethodCall mc) { for (int i = 0; i < mc.getNumberOfParameter(); i++) if (ProActiveGroup.isScatterGroupOn(mc.getParameter(i))) { return true; } return false; } /** * The proxy's method : implements the semantic of communication. This method invokes the * method call <code>mc</code> on each members of the Group. * @param mc the MethodCall to apply on each member of the Group. * @return the result of the call : <b> the result of a method call on a typed group is a * typed group</b>. * @throws InvocationTargetException if a problem occurs when invoking the method on the members of the Group */ public Object reify(MethodCall mc) throws InvocationTargetException { //System.out.println("A method is called : \"" + mc.getName() + "\" on " + this.memberList.size() + " membres."); /* if the method called is toString, apply it to the proxy, not to the members */ if ("toString".equals(mc.getName())) { return this.toString(); } /* if the method called is hashCode, apply it to the proxy, not to the members */ if ("hashCode".equals(mc.getName())) { return new Integer(this.hashCode()); } /* result will be a stub on a proxy for group representing the group of results */ Object result = null; /* check if the threadpool is big enough to make the call (is there is not enough thread, create new ones) */ this.threadpool.checkNumberOfThreads(this.memberList.size()); /* if OneWay : do not construct result */ if (AbstractProxy.isOneWayCall(mc)) { this.oneWayCallOnGroup(mc); } /* Special case : the method returns void but is Synchronous because it throws Exception */ else if (mc.getReifiedMethod().getReturnType() == Void.TYPE) { this.oneWayCallOnGroup(mc); } /* if the call is asynchronous the group of result will be a group a future */ else { // with group in general case : SYNC == ASYNC !!!! result = this.asynchronousCallOnGroup(mc); } /* A barrier of synchronisation to be sur that all calls are done before continuing the execution */ this.threadpool.complete(); return result; } /** * Creates and initializes (and returns) the group of result, then launch threads for asynchronous call of each member. * @param mc the MethodCall to be applied on each member of the Group. * @return the result of the call. 
*/ protected synchronized Object asynchronousCallOnGroup(MethodCall mc) { Object result; Body body = ProActive.getBodyOnThis(); // Creates a stub + ProxyForGroup for representing the result try { Object[] paramProxy = new Object[0]; result = MOP.newInstance(mc.getReifiedMethod().getReturnType() .getName(), null, ProxyForGroup.class.getName(), paramProxy); ((ProxyForGroup) ((StubObject) result).getProxy()).className = mc.getReifiedMethod() .getReturnType() .getName(); } catch (Exception e) { e.printStackTrace(); return null; } int size = this.memberList.size(); // Init the lists of result with null value to permit the "set(index)" operation Vector memberListOfResultGroup = ((ProxyForGroup) ((StubObject) result).getProxy()).memberList; for (int i = 0; i < size; i++) { memberListOfResultGroup.add(null); } // Creating Threads if (isDispatchingCall(mc) == false) { if (uniqueSerialization) { mc.transformEffectiveArgumentsIntoByteArray(); } for (int index = 0; index < this.memberList.size(); index++) this.threadpool.addAJob(new ProcessForAsyncCall(this, this.memberList, memberListOfResultGroup, index, mc, body)); } else { // isDispatchingCall == true for (int index = 0; index < memberList.size(); index++) { Object[] individualEffectiveArguments = new Object[mc.getNumberOfParameter()]; for (int i = 0; i < mc.getNumberOfParameter(); i++) if (ProActiveGroup.isScatterGroupOn(mc.getParameter(i))) { individualEffectiveArguments[i] = ProActiveGroup.get(mc.getParameter( i), index % ProActiveGroup.size(mc.getParameter(i))); } else { individualEffectiveArguments[i] = mc.getParameter(i); } this.threadpool.addAJob(new ProcessForAsyncCall(this, this.memberList, memberListOfResultGroup, index, new MethodCall(mc.getReifiedMethod(), individualEffectiveArguments), body)); } } LocalBodyStore.getInstance().setCurrentThreadBody(body); return result; } /** * Add the results (Future) into the typed group result at the correct poisition. * @param memberListOfResultGroup the list of the typed group result. * @param result the result of a call on member of a Group. * @param index the rank of the result. */ protected synchronized void addToListOfResult( Vector memberListOfResultGroup, Object result, int index) { memberListOfResultGroup.set(index, result); } /** * Launchs the threads for OneWay call of each member of the Group. * @param mc the MethodCall to be applied on each member of the Group. 
*/ protected synchronized void oneWayCallOnGroup(MethodCall mc) { Body body = ProActive.getBodyOnThis(); ExceptionList exceptionList = new ExceptionList(); // Creating Threads if (isDispatchingCall(mc) == false) { if (uniqueSerialization) { mc.transformEffectiveArgumentsIntoByteArray(); } for (int index = 0; index < this.memberList.size(); index++) { this.threadpool.addAJob(new ProcessForOneWayCall(this, this.memberList, index, mc, body, exceptionList)); } } else { // isDispatchingCall == true for (int index = 0; index < memberList.size(); index++) { Object[] individualEffectiveArguments = new Object[mc.getNumberOfParameter()]; for (int i = 0; i < mc.getNumberOfParameter(); i++) if (ProActiveGroup.isScatterGroupOn(mc.getParameter(i))) { individualEffectiveArguments[i] = ProActiveGroup.get(mc.getParameter( i), index % ProActiveGroup.size(mc.getParameter(i))); } else { individualEffectiveArguments[i] = mc.getParameter(i); } this.threadpool.addAJob(new ProcessForOneWayCall(this, this.memberList, index, new MethodCall(mc.getReifiedMethod(), individualEffectiveArguments), body, exceptionList)); } } LocalBodyStore.getInstance().setCurrentThreadBody(body); if (exceptionList.size() != 0) { throw exceptionList; } } /** * If o is a reified object and if it is "assignableFrom" the class of the group, add it into the group<br> * - if o is a group merge it into the group<br> * - if o is not a reified object nor a group : do nothing<br> * @param o - element whose presence in this group is to be ensured * @return <code>true</code> if this collection changed as a result of the call */ public boolean add(Object o) { try { if ((MOP.forName(this.className)).isAssignableFrom(o.getClass())) { /* if o is an reified object and if it is "assignableFrom" the class of the group, ... add it into the group */ if (MOP.isReifiedObject(o)) { return this.memberList.add(o); } // COMPONENTS /* if o is a reference on a component interface*/ else if (o instanceof ProActiveInterface) { return this.memberList.add(o); } /* if o is a Group */ else if (o instanceof org.objectweb.proactive.core.group.ProxyForGroup) { /* like an addMerge call */ return this.memberList.addAll(((org.objectweb.proactive.core.group.ProxyForGroup) o).memberList); } /* o is a standard Java object */ else { return this.memberList.add(o); } } else { if (logger.isInfoEnabled()) { logger.info("uncompatible Object"); } return false; } } catch (java.lang.ClassNotFoundException e) { if (logger.isInfoEnabled()) { logger.info("Unknown class : " + this.className); } } return true; } /** * Adds all of the elements in the specified Collection to this Group. * @param c - the elements to be inserted into this Group. * @return <code>true</code> if this collection changed as a result of the call. */ public boolean addAll(Collection c) { boolean modified = false; Iterator iterator = c.iterator(); while (iterator.hasNext()) { modified |= this.add(iterator.next()); } return modified; } /** * Removes all of the elements from this group. * This group will be empty after this method returns. */ public void clear() { this.memberList.clear(); } /** * This method returns true if and only if this group contains at least one element e such that <code>o.equals(e)</code> * @return <code>true</code> if this collection contains the specified element. */ public boolean contains(Object o) { return this.memberList.contains(o); } /** * Checks if this Group contains all of the elements in the specified collection. * @param c - the collection to be checked for containment in this Group. 
* @return <code>true</code> if this Group contains all of the elements in the specified collection */ public boolean containsAll(Collection c) { boolean contained; Iterator iterator = c.iterator(); while (iterator.hasNext()) { contained = this.contains(iterator.next()); if (!contained) { return false; } } return true; } /** * Compares the specified object with this group for equality. * @param o the Object for wich we test the equality. * @return <code>true</code> if <code>o</code> is the same Group as <code>this</code>. */ public boolean equals(Object o) { if (o instanceof org.objectweb.proactive.core.group.ProxyForGroup) { return this.proxyForGroupID.equals(((org.objectweb.proactive.core.group.ProxyForGroup) o).proxyForGroupID); } else { return false; } } /** * Returns the hash code value for this Group. * @return the hash code value for this Group. */ public int hashCode() { return this.memberList.hashCode(); } /** * Check if the group is empty. * @return <code>true</code> if this collection contains no elements. */ public boolean isEmpty() { return this.memberList.isEmpty(); } /** * Returns an Iterator of the member in the Group. * @return an Iterator of the member in the Group. */ public Iterator iterator() { return this.memberList.iterator(); } /** * Removes a single instance of the specified element from this Group, if it is present. * It removes the first occurence e where <code>o.equals(e)</code> returns <code>true</code>. * @param o the element to be removed from this Group (if present). * @return <code>true> if the Group contained the specified element. */ public boolean remove(Object o) { return this.memberList.remove(o); } /** * Removes all this Group's elements that are also contained in the specified collection. * After this call returns, this collection will contain no elements in common with the specified collection. * @param c - elements to be removed from this Group. * @return <code>true</code> if this Group changed as a result of the call */ public boolean removeAll(Collection c) { boolean modified = false; Iterator iterator = c.iterator(); while (iterator.hasNext()) { modified |= this.remove(iterator.next()); } return modified; } /** * Retains only the elements in this Group that are contained in the specified collection. * It removes from this Group all of its elements that are not contained in the specified collection. * @param c - elements to be retained in this Group. * @return <code>true</code> if this Group changed as a result of the call. */ public boolean retainAll(Collection c) { boolean modified = false; Iterator iterator = c.iterator(); while (iterator.hasNext()) { Object tmp = iterator.next(); if (this.contains(tmp)) { modified |= this.remove(tmp); } } return modified; } /** * Returns the number of member in this Group. * @return the number of member in this Group. */ public int size() { return this.memberList.size(); } /** * Returns an array containing all of the elements in this Group in the correct order. * @return an array containing all of the elements in this Group in the correct order. */ public Object[] toArray() { return this.memberList.toArray(); } /** * Returns an array containing all of the elements in this collection; * the runtime type of the returned array is that of the specified array. * @param a - the array into which the elements of this collection are to be stored, if it is big enough; * otherwise, a new array of the same runtime type is allocated for this purpose. * @return an array containing the elements of this collection. 
*/ public Object[] toArray(Object[] a) { return this.memberList.toArray(a); } /** * Add all member of the group <code>ogroup</code> into the Group. <code>ogroup</code> can be :<br> * - a typed group<br> * - a Group<br> * - a standard Object<br> * but it have to be (or to extend) the Class of the Group. * @param oGroup the object(s) to merge into the Group. */ public void addMerge(Object oGroup) { try { /* check oGroup is an Reified Object and if it is "assignableFrom" the class of the group */ if ((MOP.isReifiedObject(oGroup)) && ((MOP.forName(this.className)).isAssignableFrom( oGroup.getClass()))) { /* check oGroup is an object representing a group */ if (((StubObject) oGroup).getProxy() instanceof org.objectweb.proactive.core.group.ProxyForGroup) { memberList.addAll(((ProxyForGroup) ((StubObject) oGroup).getProxy()).memberList); } /* if oGroup is a Standard Active Object (but not a group), just add it */ else { this.add(oGroup); } } /* if oGroup is a Group */ else if (oGroup instanceof org.objectweb.proactive.core.group.ProxyForGroup) { memberList.addAll(((org.objectweb.proactive.core.group.ProxyForGroup) oGroup).memberList); } } catch (java.lang.ClassNotFoundException e) { if (logger.isInfoEnabled()) { logger.info("Unknown class : " + this.className); } } } /** * Returns the index of the first occurence of the specified Object <code>obj</code>. * @param obj - the obj tahat is searched in the Group. * @return the rank of <code>obj</code> in the Group. * -1 if the list does not contain this object. */ public int indexOf(Object obj) { return this.memberList.indexOf(obj); } /** * Returns a list iterator of the members in this Group (in proper sequence). * @return a list iterator of the members in this Group. */ public ListIterator listIterator() { return this.memberList.listIterator(); } /** * Removes the element at the specified position. * @param index the rank of the object to remove in the Group. * @return the object that has been removed */ public Object remove(int index) { // decrease indexes in the map element names <-> indexes Iterator it = elementNames.keySet().iterator(); while (it.hasNext()) { String key = (String)it.next(); Integer value = (Integer)elementNames.get(key); if (value.intValue() > index) { elementNames.put(key, new Integer(value.intValue() - 1)); } } return this.memberList.remove(index); } /** * Returns the i-th member of the group. * @param i - the rank of the object to return. * @return the member of the Group at the specified rank. */ public Object get(int i) { return this.memberList.get(i); } /** * Returns the ("higher") Class of group's member. * @return the Class that all Group's members are (or extend). * @throws java.lang.ClassNotFoundException if the class name of the Group is not known. */ public Class getType() throws java.lang.ClassNotFoundException { return MOP.forName(this.className); } /** * Returns the full name of ("higher") Class of group's member * @return the name of the Class that all Group's members are (or extend). */ public String getTypeName() { return this.className; } /** * Returns an Object (a <b>typed group</b> Object) representing the Group * @return a typed group corresponding to the Group. 
*/ public Object getGroupByType() { Object result; try { // a new proxy is generated result = MOP.newInstance(this.className, null, ProxyForGroup.class.getName(), null); } catch (Exception e) { e.printStackTrace(); return null; } ProxyForGroup proxy = (ProxyForGroup) ((StubObject) result).getProxy(); proxy.memberList = this.memberList; proxy.className = this.className; proxy.proxyForGroupID = this.proxyForGroupID; proxy.waited = this.waited; return result; } // This is the best thing to do, but createStubObject has a private acces !!!! : // // Instanciates the stub object // StubObject stub = MOP.createStubObject(this.className, MOP.forName(this.className)); // // Connects the proxy to the stub // stub.setProxy(this); // return stub; // An other way is to "store" the stub and return it when asked /** * Creates a new group with all members of the group and all the members of the group <code>g</code> * @param g - a group * @return a group that contain all the members of the group and <code>g</code>. <code>null<code> if the class of the group is incompatible. */ public Group union (Group g) { try { if ((MOP.forName(this.getTypeName())).isAssignableFrom(MOP.forName(g.getTypeName()))) { ProxyForGroup result = new ProxyForGroup(this.getTypeName()); // add the members of this Iterator it = this.iterator(); while (it.hasNext()) { result.add(it.next()); } // add the members of g it = g.iterator(); while (it.hasNext()) { result.add(it.next()); } return result; } } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (ConstructionOfReifiedObjectFailedException e) { e.printStackTrace(); } // the group are incompatible (i.e. they have not members of the the same class) return null; } /** * Creates a new group with all members that belong to the group and to the group <code>g</code>. * @param g - a group * @return a group that contain the common members of the group and <code>g</code>. <code>null<code> if the class of the group is incompatible. */ public Group intersection (Group g) { try { if ((MOP.forName(this.getTypeName())).isAssignableFrom(MOP.forName(g.getTypeName()))) { ProxyForGroup result = new ProxyForGroup(this.getTypeName()); Object member; Iterator it = this.iterator(); // add the members of the group that belong to g while (it.hasNext()) { member = it.next(); if (g.indexOf(member) > -1) { result.add(member); } } return result; } } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (ConstructionOfReifiedObjectFailedException e) { e.printStackTrace(); } // the group are incompatible (i.e. they have not members of the the same class) return null; } /** * Creates a new group with the members that belong to the group, but not to the group <code>g</code>. * @param g - a group * @return a group that contain the members of the group without the member <code>g</code>. <code>null<code> if the class of the group is incompatible. */ public Group exclude (Group g) { try { if ((MOP.forName(this.getTypeName())).isAssignableFrom(MOP.forName(g.getTypeName()))) { ProxyForGroup result = new ProxyForGroup(this.getTypeName()); Object member; Iterator it = this.iterator(); while (it.hasNext()) { member = it.next(); if (g.indexOf(member) < 0) { result.add(member); } } return result; } } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (ConstructionOfReifiedObjectFailedException e) { e.printStackTrace(); } // the group are incompatible (i.e. 
they have not members of the the same class) return null; } /** * Creates a new group with all members that belong to the group or to the group <code>g</code>, but not to both. * @param g - a group * @return a group that contain the non-common members of the group and <code>g</code>. <code>null<code> if the class of the group is incompatible. */ public Group difference (Group g) { try { if ((MOP.forName(this.getTypeName())).isAssignableFrom(MOP.forName(g.getTypeName()))) { ProxyForGroup result = new ProxyForGroup(this.getTypeName()); Object member; Iterator it = this.iterator(); // add the members of the group that do not belong to g while (it.hasNext()) { member = it.next(); if (g.indexOf(member) < 0) { result.add(member); } } it = g.iterator(); // add the members of g that do not belong to the group while (it.hasNext()) { member = it.next(); if (this.indexOf(member) < 0) { result.add(member); } } return result; } } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (ConstructionOfReifiedObjectFailedException e) { e.printStackTrace(); } // the group are incompatible (i.e. they have not members of the the same class) return null; } /** * Creates a new group with the members of the group begining at the index <code>begin</code> and ending at the index <code>end</code>. * @param begin - the begining index * @param end - the ending index * @return a group that contain the members of the group from <code>begin</code> to <code>end</code>. <code>null</code> if <code>begin > end</code>. */ public Group range (int begin, int end) { // bag arguments => return null if (begin > end) { return null; } if (begin < 0) { begin = 0; } if (end > this.size()) { end = this.size(); } try { ProxyForGroup result = new ProxyForGroup(this.getTypeName()); for (int i = begin ; i <= end ; i++) { result.add(this.get(i)); } return result; } catch (ConstructionOfReifiedObjectFailedException e) { e.printStackTrace(); return null; } } /** * Communicates the SPMD Group to members * @param spmdGroup - the SPMD group */ public void setSPMDGroup (Object spmdGroup) { try { this.reify(new MethodCallSetSPMDGroup(spmdGroup)); } catch (InvocationTargetException e) { logger.info("Unable to set the SPMD group"); e.printStackTrace(); } } /** * To debug, display the size of the Group and all its members with there position */ public void display() { logger.info("Number of member : " + memberList.size()); for (int i = 0; i < memberList.size(); i++) logger.info(" " + i + " : " + memberList.get(i).getClass().getName()); } /** * Waits that all the members are arrived. */ public void waitAll() { ProActive.waitForAll(this.memberList); } /** * Waits that at least one member is arrived. */ public void waitOne() { ProActive.waitForAny(this.memberList); } /** * Waits that the member at the specified rank is arrived. * @param n - the rank of the awaited member. */ public void waitTheNth(int n) { ProActive.waitFor(this.memberList.get(n)); } /** * Waits that at least <code>n</code> members are arrived. * @param n - the number of awaited members. */ public void waitN(int n) { for (int i = 0; i < n; i++) { this.waitTheNth(i); } } /** * Waits that at least one member is arrived and returns it. * @return a non-awaited member of the Group. */ public Object waitAndGetOne() { return this.memberList.get(ProActive.waitForAny(this.memberList)); } /** * Waits one future is arrived and returns it (removes it from the group). * @return a member of <code>o</code>. 
(<code>o</code> is removed from the group) */ public Object waitAndGetOneThenRemoveIt() { return this.memberList.remove(ProActive.waitForAny(this.memberList)); } /** * Waits that the member at the specified rank is arrived and returns it. * @param n - the rank of the wanted member. * @return the member (non-awaited) at the rank <code>n</code> in the Group. */ public Object waitAndGetTheNth(int n) { ProActive.waitForTheNth(this.memberList, n); return this.memberList.get(n); } /** * Waits that at least one member is arrived and returns its index. * @return the index of a non-awaited member of the Group. */ public int waitOneAndGetIndex() { int index = 0; this.memberList.get(ProActive.waitForAny(this.memberList)); while (ProActive.isAwaited(this.memberList.get(index))) { index++; } return index; } /** * Checks if all the members of the Group are awaited. * @return <code>true</code> if all the members of the Group are awaited. */ public boolean allAwaited() { for (int i = 0; i < this.memberList.size(); i++) if (!(ProActive.isAwaited(this.memberList.get(i)))) { return false; } return true; } /** * Checks if all the members of the Group are arrived. * @return <code>true</code> if all the members of the Group are arrived. */ public boolean allArrived() { for (int i = 0; i < this.memberList.size(); i++) if (ProActive.isAwaited(this.memberList.get(i))) { return false; } return true; } /** * Returns an ExceptionList containing all the throwables (exceptions and errors) occured * when this group was built * @return an ExceptionList */ public ExceptionList getExceptionList() { ExceptionList exceptionList = new ExceptionList(); for (int i = 0; i < this.memberList.size(); i++) { if (this.memberList.get(i) instanceof Throwable) { exceptionList.add(new ExceptionInGroup(null, (Throwable) this.memberList.get(i))); } } return exceptionList; } /** * Removes all exceptions and null references contained in the Group. * Exceptions (and null references) appears with communication/program-level/runtime errors * and are stored in the Group. * (After this operation the size of the Group decreases) */ public void purgeExceptionAndNull() { Iterator it = this.memberList.iterator(); while (it.hasNext()) { Object element = (Object) it.next(); if ((element instanceof Throwable) || (element == null)) { it.remove(); } } } /** * Modifies the number of members served by one thread * @param i - the new ratio */ public void setRatioNemberToThread(int i) { this.threadpool.ratio(i); } /** * Modifies the number of additional threads to serve members * @param i - the new number */ public void setAdditionalThread(int i) { this.threadpool.thread(i); } /** * Builds the members using the threads (of the threadpool). * @param className - the name of the Class of the members. * @param params - an array that contains the parameters for the constructor of member. * @param nodeList - the nodes where the member will be created. * @param className - the name of the Class of the members. * @param params - an array that contains the parameters for the constructor of members. * @param nodeList - the nodes where the member will be created. 
*/ protected void createMemberWithMultithread(String className, Object[][] params, String[] nodeList) { // Initializes the Group to the correct size for (int i = 0; i < params.length; i++) { this.memberList.add(null); } for (int i = 0; i < params.length; i++) { this.threadpool.addAJob(new ProcessForGroupCreation(this, className, params[i], nodeList[i % nodeList.length], i)); } this.threadpool.complete(); } /** * Builds the members using the threads (of the threadpool). * @param className - the name of the Class of the members. * @param params - the parameters for the constructor of members. * @param nodeList - the nodes where the member will be created. */ protected void createMemberWithMultithread(String className, Object[] params, String[] nodeList) { // Initializes the Group to the correct size for (int i = 0; i < params.length; i++) { this.memberList.add(null); } for (int i = 0; i < params.length; i++) { this.threadpool.addAJob(new ProcessForGroupCreation(this, className, params, nodeList[i % nodeList.length], i)); } this.threadpool.complete(); } /** * Sets an object to the specified position in the Group * @param index - the position * @param o - the object to add */ public void set(int index, Object o) { this.memberList.set(index, o); } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { // this.threadpool.finalize(); out.defaultWriteObject(); } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { in.defaultReadObject(); this.proxyForGroupID = new UniqueID(); this.threadpool = new ThreadPool(); } //Map class style method /** * Returns <code>true</code> if this Group contains a mapping for the specified key. * More formally, returns <code>true</code> if and only if this Group contains at * a mapping for a key <code>k</code> such that <code>(key==null ? k==null : key.equals(k))</code>. * (There can be at most one such mapping.) * @param key - key whose presence in this Group is to be tested. * @return <code>true</code> if this Group contains a mapping for the specified key. * @throws ClassCastException - if the key is of an inappropriate type for this Group (optional). * @throws NullPointerException - if the key is null and this Group does not not permit null keys (optional). */ public boolean containsKey(String key) { return this.elementNames.containsKey(key); } /** * Returns <code>true</code> if this Group maps one or more keys to the specified value. * More formally, returns <code>true</code> if and only if this Group contains at least * one mapping to a value <code>v</code> such that <code>(value==null ? v==null : value.equals(v))</code>. * @param value - value whose presence in this map is to be tested. * @return <code>true</code> if this Group maps one or more keys to the specified value. * @throws ClassCastException - if the value is of an inappropriate type for this Collection (optional). * @throws NullPointerException - if the value is null and this Group does not not permit null values (optional). */ public boolean containsValue(Object value) { return this.memberList.contains(value); } /** * Returns the Object to which this Group maps the specified key. * Returns <code>null</code> if the Collection contains no mapping for this key. * A return value of <code>null</code> does not necessarily indicate that the Collection * contains no mapping for the key; it's also possible that the Group explicitly maps the key to null. * The containsKey operation may be used to distinguish these two cases. 
* More formally, if this Group contains a mapping from a key <code>k</code> to a value * <code>v</code> such that <code>(key==null ? k==null : key.equals(k))</code>, * then this method returns <code>v</code>; otherwise it returns <code>null</code>. * (There can be at most one such mapping.) * @param key - key whose associated value is to be returned. * @return the value to which this map maps the specified key, or <code>null</code> if the map contains no mapping for this key. * @throws ClassCastException - if the key is of an inappropriate type for this Group (optional). * @throws NullPointerException - key is <code>null</code> and this Group does not not permit null keys (optional). */ public synchronized Object getNamedElement(String key) { return get(((Integer)this.elementNames.get(key)).intValue()); } public synchronized void addNamedElement(String key, Object value) { if (elementNames.containsKey(key)) { removeNamedElement(key); } this.elementNames.put(key,new Integer(this.size())); this.add(value); } /** * Returns a set view of the keys contained in this Group. * The set is backed by the Group, so changes to the Group are reflected in the set, * and vice-versa. If the Group is modified while an iteration over the set is in progress, * the results of the iteration are undefined. The set supports element removal, * which removes the corresponding mapping from the Group, via the Iterator.remove, * Set.remove, removeAll retainAll, and clear operations. * It does not support the add or addAll operations. * @return a set view of the keys contained in this Group. */ public Set keySet() { return this.elementNames.keySet(); } /** * Removes the named element of the group. It also returns this element. * @param key the name of the element * @return the removed element */ public synchronized Object removeNamedElement(String key) { int index = ((Integer)elementNames.get(key)).intValue(); Object removed = get(index); remove(index); elementNames.remove(key); return removed; } }
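/*
 * Hedged usage sketch for the group proxy above. It assumes the companion factory
 * org.objectweb.proactive.core.group.ProActiveGroup exposes newGroup(String) and
 * getGroup(Object) with the signatures used here, and that the Group view declares
 * add(Object); ProActiveGroup and Group are referenced by ProxyForGroup, but these exact
 * overloads are assumptions of this sketch, not confirmed by the file. The member class A
 * is hypothetical.
 */
import org.objectweb.proactive.core.group.Group;
import org.objectweb.proactive.core.group.ProActiveGroup;

public class TypedGroupSketch {
    public static void main(String[] args) throws Exception {
        // Build an empty typed group of A, then manage it through its Group view.
        Object typedGroup = ProActiveGroup.newGroup(A.class.getName());
        Group view = ProActiveGroup.getGroup(typedGroup);
        view.add(new A()); // plain Java objects are accepted as members (see add(Object) above)
        // A call on the typed stub is reified and broadcast to every member by ProxyForGroup.reify(...)
        ((A) typedGroup).sayHello();
    }
}

/** Hypothetical member class used only for this sketch. */
class A {
    public A() { }
    public void sayHello() { System.out.println("hello from a group member"); }
}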
package com.rarchives.ripme.ripper.rippers; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLEncoder; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.rarchives.ripme.ui.RipStatusMessage; import org.json.JSONArray; import org.json.JSONObject; import org.jsoup.Connection; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import com.rarchives.ripme.ripper.AbstractHTMLRipper; import com.rarchives.ripme.utils.Http; public class TsuminoRipper extends AbstractHTMLRipper { private Map<String,String> cookies = new HashMap<>(); public TsuminoRipper(URL url) throws IOException { super(url); } private JSONArray getPageUrls() { String postURL = "http: try { // This sessionId will expire and need to be replaced cookies.put("ASP.NET_SessionId","c4rbzccf0dvy3e0cloolmlkq"); logger.info(cookies); Document doc = Jsoup.connect(postURL).data("q", getAlbumID()).userAgent(USER_AGENT).cookies(cookies).referrer("http: String jsonInfo = doc.html().replaceAll("<html>","").replaceAll("<head></head>", "").replaceAll("<body>", "").replaceAll("</body>", "") .replaceAll("</html>", "").replaceAll("\n", ""); logger.info(jsonInfo); JSONObject json = new JSONObject(jsonInfo); logger.info(json.getJSONArray("reader_page_urls")); return json.getJSONArray("reader_page_urls"); } catch (IOException e) { logger.info(e); sendUpdate(RipStatusMessage.STATUS.DOWNLOAD_ERRORED, "Unable to download album, please compete the captcha at http: + getAlbumID() + " and try again"); return null; } } @Override public String getHost() { return "tsumino"; } @Override public String getDomain() { return "tsumino.com"; } @Override public String getGID(URL url) throws MalformedURLException { Pattern p = Pattern.compile("https?: Matcher m = p.matcher(url.toExternalForm()); if (m.matches()) { return m.group(1) + "_" + m.group(2); } throw new MalformedURLException("Expected tsumino URL format: " + "tsumino.com/Book/Info/ID/TITLE - got " + url + " instead"); } private String getAlbumID() { Pattern p = Pattern.compile("https?: Matcher m = p.matcher(url.toExternalForm()); if (m.matches()) { return m.group(1); } return null; } @Override public Document getFirstPage() throws IOException { Connection.Response resp = Http.url(url).response(); cookies.putAll(resp.cookies()); logger.info(resp.parse()); return resp.parse(); } @Override public List<String> getURLsFromPage(Document doc) { JSONArray imageIds = getPageUrls(); List<String> result = new ArrayList<>(); for (int i = 0; i < imageIds.length(); i++) { result.add("http: } return result; } @Override public void downloadURL(URL url, int index) { sleep(1000); addURLToDownload(url, getPrefix(index)); } }
package com.ctrip.platform.dal.daogen.dao; import com.ctrip.platform.dal.daogen.entity.DatabaseSet; import com.ctrip.platform.dal.daogen.entity.DatabaseSetEntry; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import javax.sql.DataSource; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; public class DaoOfDatabaseSet { private JdbcTemplate jdbcTemplate; public void setDataSource(DataSource dataSource) { this.jdbcTemplate = new JdbcTemplate(dataSource); } public DatabaseSet getAllDatabaseSetById(Integer id) { List<DatabaseSet> dbset = this.jdbcTemplate.query("SELECT id, name, provider, shardingStrategy, groupId, update_user_no, update_time FROM databaseset WHERE id = ?", new Object[]{id}, new RowMapper<DatabaseSet>() { public DatabaseSet mapRow(ResultSet rs, int rowNum) throws SQLException { return DatabaseSet.visitRow(rs); } }); return dbset != null && dbset.size() > 0 ? dbset.get(0) : null; } public List<DatabaseSet> getAllDatabaseSetByName(String name) { List<DatabaseSet> dbset = this.jdbcTemplate.query("SELECT id, name, provider, shardingStrategy, groupId, update_user_no, update_time FROM databaseset WHERE name = ?", new Object[]{name}, new RowMapper<DatabaseSet>() { public DatabaseSet mapRow(ResultSet rs, int rowNum) throws SQLException { return DatabaseSet.visitRow(rs); } }); return dbset; } public List<DatabaseSet> getAllDatabaseSetByGroupId(Integer groupId) { List<DatabaseSet> dbset = this.jdbcTemplate.query("SELECT id, name, provider, shardingStrategy, groupId, update_user_no, update_time FROM databaseset WHERE groupId = ?", new Object[]{groupId}, new RowMapper<DatabaseSet>() { public DatabaseSet mapRow(ResultSet rs, int rowNum) throws SQLException { return DatabaseSet.visitRow(rs); } }); return dbset; } public List<DatabaseSetEntry> getAllDatabaseSetEntryByDbsetid(Integer databaseSet_Id) { List<DatabaseSetEntry> dbset = this.jdbcTemplate.query("SELECT id, name, databaseType, sharding, connectionString, databaseSet_Id, update_user_no, update_time FROM databasesetentry WHERE databaseSet_Id = ?", new Object[]{databaseSet_Id}, new RowMapper<DatabaseSetEntry>() { public DatabaseSetEntry mapRow(ResultSet rs, int rowNum) throws SQLException { return DatabaseSetEntry.visitRow(rs); } }); return dbset; } public DatabaseSetEntry getMasterDatabaseSetEntryByDatabaseSetName(String dbName) { /* bind the set name as a query parameter instead of concatenating it into the SQL string */ List<DatabaseSetEntry> list = this.jdbcTemplate.query("select en.id, en.name, en.databaseType, en.sharding, en.connectionString, en.databaseSet_Id, en.update_user_no, en.update_time " + "from databasesetentry as en " + "join databaseset as se on en.databaseSet_Id = se.id " + "where se.name = ? and en.databaseType = 'Master' limit 1;", new Object[]{dbName}, new RowMapper<DatabaseSetEntry>() { @Override public DatabaseSetEntry mapRow(ResultSet rs, int rowNum) throws SQLException { return DatabaseSetEntry.visitRow(rs); } }); return null != list && list.size() > 0 ?
list.get(0) : null; } public int insertDatabaseSet(DatabaseSet dbset) { return this.jdbcTemplate.update("INSERT INTO databaseset(name, provider, shardingStrategy, groupId, update_user_no, update_time) VALUE(?,?,?,?,?,?)", dbset.getName(), dbset.getProvider(), dbset.getShardingStrategy(), dbset.getGroupId(), dbset.getUpdate_user_no(), dbset.getUpdate_time()); } public int insertDatabaseSetEntry(DatabaseSetEntry dbsetEntry) { return this.jdbcTemplate.update("INSERT INTO databasesetentry(name, databaseType, sharding, connectionString, databaseSet_Id, update_user_no, update_time) VALUE(?,?,?,?,?,?,?)", dbsetEntry.getName(), dbsetEntry.getDatabaseType(), dbsetEntry.getSharding(), dbsetEntry.getConnectionString(), dbsetEntry.getDatabaseSet_Id(), dbsetEntry.getUpdate_user_no(), dbsetEntry.getUpdate_time()); } public int updateDatabaseSet(DatabaseSet dbset) { return this.jdbcTemplate.update("UPDATE databaseset SET name=?, provider=?, shardingStrategy=?, groupId=?, update_user_no=?, update_time=? WHERE id=?", dbset.getName(), dbset.getProvider(), dbset.getShardingStrategy(), dbset.getGroupId(), dbset.getUpdate_user_no(), dbset.getUpdate_time(), dbset.getId()); } public int updateDatabaseSetEntry(DatabaseSetEntry dbsetEntry) { return this.jdbcTemplate.update("UPDATE databasesetentry SET name=?, databaseType=?, sharding=?, connectionString=?, databaseSet_Id=?, update_user_no=?, update_time=? WHERE id=?", dbsetEntry.getName(), dbsetEntry.getDatabaseType(), dbsetEntry.getSharding(), dbsetEntry.getConnectionString(), dbsetEntry.getDatabaseSet_Id(), dbsetEntry.getUpdate_user_no(), dbsetEntry.getUpdate_time(), dbsetEntry.getId()); } /** * Deletes all databasesetentry rows that belong to the given database set. * * @param dbsetId the databaseSet_Id whose entries are to be deleted * @return the number of deleted rows */ public int deleteDatabaseSetEntryByDbsetId(Integer dbsetId) { return this.jdbcTemplate.update("DELETE FROM databasesetentry WHERE databaseSet_Id=?", dbsetId); } /** * Deletes a single databasesetentry row by its id. * * @param id the id of the entry to delete * @return the number of deleted rows */ public int deleteDatabaseSetEntryById(Integer id) { return this.jdbcTemplate.update("DELETE FROM databasesetentry WHERE id=?", id); } public int deleteDatabaseSetById(Integer dbsetId) { return this.jdbcTemplate.update("DELETE FROM databaseset WHERE id=?", dbsetId); } }
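/*
 * Hedged wiring sketch for the DAO above. DriverManagerDataSource is the standard Spring JDBC
 * test/bootstrap DataSource; the driver class, JDBC URL, credentials and the set name
 * "exampleSet" are placeholders. In a real deployment the DataSource would normally be
 * injected by the Spring container rather than built by hand.
 */
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import com.ctrip.platform.dal.daogen.dao.DaoOfDatabaseSet;
import com.ctrip.platform.dal.daogen.entity.DatabaseSet;

public class DaoOfDatabaseSetWiringSketch {
    public static void main(String[] args) {
        DriverManagerDataSource ds = new DriverManagerDataSource();
        ds.setDriverClassName("com.mysql.jdbc.Driver");      // placeholder driver
        ds.setUrl("jdbc:mysql://localhost:3306/daogen");      // placeholder URL
        ds.setUsername("user");                               // placeholder
        ds.setPassword("secret");                             // placeholder

        DaoOfDatabaseSet dao = new DaoOfDatabaseSet();
        dao.setDataSource(ds);

        // Look up database sets by name and list the entries of each.
        for (DatabaseSet set : dao.getAllDatabaseSetByName("exampleSet")) {
            System.out.println(set.getName() + " -> " + dao.getAllDatabaseSetEntryByDbsetid(set.getId()));
        }
    }
}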
package com.reucon.commons.web; import org.slf4j.MDC; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import java.io.IOException; import java.util.Map; /** * Servlet filter based on logback's MDCInsertingServletFilter with support the following additional keys: * <ul> * <li>req.requestPath</li> * <li>req.remoteUser</li> * <li>req.sessionId</li> * </ul> * Of course the original keys from MDCInsertingServletFilter are also supported: * <ul> * <li>req.remoteHost</li> * <li>req.userAgent</li> * <li>req.requestURI</li> * <li>req.queryString</li> * <li>req.requestURL</li> * <li>req.xForwardedFor</li> * </ul> * * @since 2.0.0 */ public class MdcInsertingServletFilter implements Filter { public static final String REQUEST_REMOTE_HOST_MDC_KEY = "req.remoteHost"; public static final String REQUEST_USER_AGENT_MDC_KEY = "req.userAgent"; public static final String REQUEST_METHOD = "req.method"; public static final String REQUEST_REQUEST_URI = "req.requestURI"; public static final String REQUEST_QUERY_STRING = "req.queryString"; public static final String REQUEST_REQUEST_URL = "req.requestURL"; public static final String REQUEST_X_FORWARDED_FOR = "req.xForwardedFor"; public static final String REQUEST_REQUEST_PATH = "req.requestPath"; public static final String REQUEST_REMOTE_USER = "req.remoteUser"; public static final String REQUEST_SESSION_ID = "req.sessionId"; private static final String USER_AGENT_HEADER = "User-Agent"; private static final String X_FORWARDED_FOR_HEADER = "X-Forwarded-For"; @Override public void init(FilterConfig filterConfig) throws ServletException { } @Override public void destroy() { } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { final Map originalContextMap = MDC.getCopyOfContextMap(); try { insertBasicProperties(request); chain.doFilter(request, response); } finally { if (originalContextMap == null) { MDC.clear(); } else { MDC.setContextMap(originalContextMap); } } } protected void insertBasicProperties(ServletRequest request) { MDC.put(REQUEST_REMOTE_HOST_MDC_KEY, request.getRemoteHost()); if (request instanceof HttpServletRequest) { // from MDCInsertingServletFilter final HttpServletRequest httpServletRequest = (HttpServletRequest) request; MDC.put(REQUEST_METHOD, httpServletRequest.getMethod()); MDC.put(REQUEST_REQUEST_URI, httpServletRequest.getRequestURI()); final StringBuffer requestURL = httpServletRequest.getRequestURL(); if (requestURL != null) { MDC.put(REQUEST_REQUEST_URL, requestURL.toString()); } MDC.put(REQUEST_QUERY_STRING, httpServletRequest.getQueryString()); MDC.put(REQUEST_USER_AGENT_MDC_KEY, httpServletRequest.getHeader(USER_AGENT_HEADER)); MDC.put(REQUEST_X_FORWARDED_FOR, httpServletRequest.getHeader(X_FORWARDED_FOR_HEADER)); // additional attributes MDC.put(REQUEST_REQUEST_PATH, getRequestPath(httpServletRequest)); MDC.put(REQUEST_REMOTE_USER, httpServletRequest.getRemoteUser()); final HttpSession session = httpServletRequest.getSession(false); if (session != null) { MDC.put(REQUEST_SESSION_ID, session.getId()); } insertAdditionalProperties(httpServletRequest); } } /** * Override this method to insert additional properties into MDC. 
*/ protected void insertAdditionalProperties(HttpServletRequest request) { } protected String getRequestPath(HttpServletRequest request) { final StringBuilder sb = new StringBuilder(); final String servletPath = request.getServletPath(); if (servletPath != null) { sb.append(servletPath); } final String pathInfo = request.getPathInfo(); if (pathInfo != null) { sb.append(pathInfo); } return sb.toString(); } }
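/*
 * Hedged extension sketch for the filter above: insertAdditionalProperties(...) is the
 * documented hook for pushing extra request data into the MDC. The header name "X-Tenant-Id"
 * and the MDC key "req.tenantId" are illustrative assumptions. With logback, the stored value
 * can then be referenced from a pattern such as "%X{req.requestPath} %X{req.tenantId} - %msg%n".
 */
import javax.servlet.http.HttpServletRequest;
import org.slf4j.MDC;
import com.reucon.commons.web.MdcInsertingServletFilter;

public class TenantAwareMdcFilter extends MdcInsertingServletFilter {
    private static final String REQUEST_TENANT_ID = "req.tenantId"; // hypothetical MDC key

    @Override
    protected void insertAdditionalProperties(HttpServletRequest request) {
        // "X-Tenant-Id" is a made-up header used only to illustrate the hook.
        final String tenantId = request.getHeader("X-Tenant-Id");
        if (tenantId != null) {
            MDC.put(REQUEST_TENANT_ID, tenantId);
        }
    }
}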
package org.selfip.bkimmel.util.args; import java.lang.reflect.Field; import java.util.Queue; import org.selfip.bkimmel.util.UnexpectedException; /** * A command line option that stores its value into a named public field of the * application state object. Subclasses decide how to read and convert the raw * argument tokens into the value to assign. * @author brad */ public abstract class AbstractFieldOption<T> implements Command<T> { private final String fieldName; public AbstractFieldOption(String fieldName) { this.fieldName = fieldName; } /* (non-Javadoc) * @see org.selfip.bkimmel.util.args.Command#process(java.util.Queue, java.lang.Object) */ public final void process(Queue<String> argq, T state) { try { /* Class.getField only resolves public fields, so the target field must be public. */ Field field = state.getClass().getField(fieldName); Object value = getOptionValue(argq); field.set(state, value); } catch (NoSuchFieldException e) { e.printStackTrace(); throw new UnexpectedException(e); } catch (IllegalAccessException e) { e.printStackTrace(); throw new UnexpectedException(e); } } /** * Reads and converts this option's value from the remaining argument tokens. * @param argq the queue of remaining command line arguments * @return the value to assign to the target field */ protected abstract Object getOptionValue(Queue<String> argq); }
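/*
 * Hedged example of a concrete option built on the abstract class above. IntegerFieldOption
 * is a hypothetical name; it consumes the next argument token and converts it to an Integer,
 * which the base class then stores into the named public field of the state object.
 */
import java.util.Queue;
import org.selfip.bkimmel.util.args.AbstractFieldOption;

public class IntegerFieldOption<T> extends AbstractFieldOption<T> {
    public IntegerFieldOption(String fieldName) {
        super(fieldName);
    }

    @Override
    protected Object getOptionValue(Queue<String> argq) {
        // The next token on the queue is taken as this option's numeric value.
        return Integer.valueOf(argq.remove());
    }
}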
package com.sdicons.json.mapper.helper.impl; import com.sdicons.json.mapper.JSONMapper; import com.sdicons.json.mapper.MapperException; import com.sdicons.json.mapper.helper.SimpleMapperHelper; import com.sdicons.json.model.JSONObject; import com.sdicons.json.model.JSONValue; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; public class ObjectMapper implements SimpleMapperHelper { public Class getHelpedClass() { return Object.class; } public Object toJava(JSONValue aValue, Class aRequestedClass) throws MapperException { if(!aValue.isObject()) throw new MapperException("ObjectMapper cannot map: " + aValue.getClass().getName()); JSONObject aObject = (JSONObject) aValue; try { final Object lBean = aRequestedClass.newInstance(); for (String lPropname : aObject.getValue().keySet()) { // Fetch subelement information. final JSONValue lSubEl = aObject.get(lPropname); // Put the property in the bean. boolean lFoundWriter = false; PropertyDescriptor[] lPropDesc = Introspector.getBeanInfo(aRequestedClass, Introspector.USE_ALL_BEANINFO).getPropertyDescriptors(); for (PropertyDescriptor aLPropDesc : lPropDesc) { if (aLPropDesc.getName().equals(lPropname)) { lFoundWriter = true; final Method lWriter = aLPropDesc.getWriteMethod(); if (lWriter == null) { //Ignore the case of no setter final String lMsg = "Could not find a setter for prop: " + lPropname + " in class: " + aRequestedClass; System.out.println("WARNING:"+lMsg); break; //throw new MapperException(lMsg); } else { Object lProp; Type[] lTypes = lWriter.getGenericParameterTypes(); if (lTypes.length == 1 && (lTypes[0] instanceof ParameterizedType)) { // We can make use of the extra type information of the parameter of the // seter. This extra type information can be exploited by the mapper // to produce a more fine grained mapping. lProp = JSONMapper.toJava(lSubEl, (ParameterizedType) lTypes[0]); } else { // We cannot use extra type information, we fall back on the // raw class information. lProp = JSONMapper.toJava(lSubEl, aLPropDesc.getPropertyType()); } lWriter.invoke(lBean, lProp); } break; } } if (!lFoundWriter) { final String lMsg = "Could not find a setter for prop: " + lPropname + " in class: " + aRequestedClass; throw new MapperException(lMsg); } } return lBean; } catch (IllegalAccessException e) { final String lMsg = "IllegalAccessException while trying to instantiate bean: " + aRequestedClass; throw new MapperException(lMsg); } catch (InstantiationException e) { final String lMsg = "InstantiationException while trying to instantiate bean: " + aRequestedClass; throw new MapperException(lMsg); } catch (IntrospectionException e) { final String lMsg = "IntrospectionException while trying to fill bean: " + aRequestedClass; throw new MapperException(lMsg); } catch (InvocationTargetException e) { final String lMsg = "InvocationTargetException while trying to fill bean: " + aRequestedClass; throw new MapperException(lMsg); } } public JSONValue toJSON(Object aPojo) throws MapperException { // We will render the bean properties as the elements of a JSON object. 
final JSONObject lElements = new JSONObject(); String lPropName=""; try { Class lClass = aPojo.getClass(); PropertyDescriptor[] lPropDesc = Introspector.getBeanInfo(lClass, Introspector.USE_ALL_BEANINFO).getPropertyDescriptors(); for (PropertyDescriptor aLPropDesc : lPropDesc) { final Method lReader = aLPropDesc.getReadMethod(); lPropName = aLPropDesc.getName(); // Skip cglib/Hibernate proxy bookkeeping properties. if(lReader!=null&& (lReader.getReturnType().toString().contains("net.sf.cglib.proxy.Callback")|| lReader.getReturnType().toString().contains("org.hibernate.proxy.LazyInitializer"))){ continue; } // Ignore the getClass() property of any object. if(lReader!=null&&lPropName.equals("class")){ continue; } // Only serialize the property if it is readable. if (lReader != null) { lElements.getValue().put(lPropName, JSONMapper.toJSON(lReader.invoke(aPojo))); } } return lElements; } catch(IntrospectionException e) { final String lMsg = "Error while introspecting JavaBean." + " Class: "+ aPojo.getClass(); throw new MapperException(lMsg); } catch(IllegalAccessException e) { final String lMsg = "Illegal access while trying to fetch a bean property (1). Property: " + lPropName + " Object: " + aPojo; throw new MapperException(lMsg); } catch(InvocationTargetException e) { final String lMsg = "A getter threw an exception while trying to fetch a bean property (2). Property: " + lPropName + " Object: " + aPojo; throw new MapperException(lMsg); } } }
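/*
 * Hedged round-trip sketch for the bean mapper above. JSONMapper.toJSON(Object) and
 * JSONMapper.toJava(JSONValue, Class) are the static entry points that ObjectMapper itself
 * invokes; the Person bean is a hypothetical example class. Only public getter/setter pairs
 * take part in the mapping, as implemented above.
 */
import com.sdicons.json.mapper.JSONMapper;
import com.sdicons.json.mapper.MapperException;
import com.sdicons.json.model.JSONValue;

public class ObjectMapperRoundTripSketch {
    public static class Person {
        private String name;
        private String role;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
        public String getRole() { return role; }
        public void setRole(String role) { this.role = role; }
    }

    public static void main(String[] args) throws MapperException {
        Person in = new Person();
        in.setName("Ada");
        in.setRole("engineer");
        // Bean -> JSON model -> bean again.
        JSONValue json = JSONMapper.toJSON(in);
        Person out = (Person) JSONMapper.toJava(json, Person.class);
        System.out.println(json + " -> " + out.getName() + "/" + out.getRole());
    }
}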
package com.lifeonwalden.ebmms.common.annotation; import java.lang.annotation.*; @Target({ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface TcpInject { /** * Assigns a version number to the injected TCP service. * A method-level annotation has higher priority than a type-level annotation. * * @return the expected service version */ int version() default 0; /** * The maximum number of retries allowed for a failed call. * * @return the retry limit */ int maxRetryTimes() default 1; /** * The service interface class to inject. * * @return the service interface */ Class serviceInterface(); /** * A free-form description of the injected service. * * @return the description */ String description() default ""; }
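/*
 * Hedged usage sketch for the annotation above. EchoService and the annotated field are
 * hypothetical; the sketch only shows how @TcpInject would typically mark a field so that
 * an injection container (assumed, not shown here) can wire in a remote proxy for the
 * declared service interface, honouring the version and retry settings.
 */
import com.lifeonwalden.ebmms.common.annotation.TcpInject;

public class TcpInjectUsageSketch {
    /** Hypothetical remote service contract used only for this example. */
    public interface EchoService {
        String echo(String message);
    }

    // Expected to be populated by the (assumed) injection container.
    @TcpInject(serviceInterface = EchoService.class, version = 1, maxRetryTimes = 3,
            description = "echo service used for connectivity checks")
    private EchoService echoService;
}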
package com.appgyver.plugin; import org.apache.cordova.api.CordovaPlugin; import org.apache.cordova.api.CallbackContext; import org.apache.cordova.api.PluginResult; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * This class echoes a string called from JavaScript. */ public class Echo extends CordovaPlugin { @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { if (action.equals("echo")) { String message = args.getString(0); this.echo(message, callbackContext); return true; } return false; } private void echo(String message, CallbackContext callbackContext) { if (message != null && message.length() > 0) { callbackContext.success(message); } else { callbackContext.error("Expected one non-empty string argument."); } } }
package com.sri.ai.grinder.sgdpllt.library.bounds; import static com.sri.ai.expresso.helper.Expressions.apply; import static com.sri.ai.expresso.helper.Expressions.makeSymbol; import static com.sri.ai.expresso.helper.Expressions.parse; import static com.sri.ai.grinder.helper.GrinderUtil.getIndexExpressionsOfFreeVariablesIn; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.AND; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.EQUAL; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.GREATER_THAN_OR_EQUAL_TO; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.IF_THEN_ELSE; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.IN; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.PLUS; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.SUM; import static com.sri.ai.grinder.sgdpllt.library.FunctorConstants.TIMES; import static com.sri.ai.grinder.sgdpllt.library.set.extensional.ExtensionalSets.getElements; import static com.sri.ai.grinder.sgdpllt.library.set.extensional.ExtensionalSets.removeNonDestructively; import static com.sri.ai.util.Util.in; import static com.sri.ai.util.Util.mapIntoArrayList; import static com.sri.ai.util.Util.println; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import com.sri.ai.expresso.api.Expression; import com.sri.ai.expresso.api.ExtensionalSet; import com.sri.ai.expresso.api.IndexExpressionsSet; import com.sri.ai.expresso.api.IntensionalSet; import com.sri.ai.expresso.core.DefaultExistentiallyQuantifiedFormula; import com.sri.ai.expresso.core.DefaultExtensionalUniSet; import com.sri.ai.expresso.core.DefaultSymbol; import com.sri.ai.expresso.core.ExtensionalIndexExpressionsSet; //import com.sri.ai.expresso.helper.Expressions; import com.sri.ai.grinder.sgdpllt.anytime.Model; import com.sri.ai.grinder.sgdpllt.api.Context; import com.sri.ai.grinder.sgdpllt.api.Theory; import com.sri.ai.grinder.sgdpllt.core.TrueContext; //import com.sri.ai.grinder.sgdpllt.library.FunctorConstants; import com.sri.ai.grinder.sgdpllt.library.set.extensional.ExtensionalSets; import com.sri.ai.grinder.sgdpllt.theory.compound.CompoundTheory; import com.sri.ai.grinder.sgdpllt.theory.differencearithmetic.DifferenceArithmeticTheory; import com.sri.ai.grinder.sgdpllt.theory.equality.EqualityTheory; import com.sri.ai.grinder.sgdpllt.theory.linearrealarithmetic.LinearRealArithmeticTheory; import com.sri.ai.grinder.sgdpllt.theory.propositional.PropositionalTheory; import com.sri.ai.grinder.sgdpllt.theory.tuple.TupleTheory; import com.sri.ai.util.base.NullaryFunction; import com.sri.ai.util.collect.CartesianProductIterator; import com.sri.ai.util.collect.ManyToManyRelation; public class Bounds { // a bound is a set of expressions representing its extreme points static boolean debug = false; public static Expression simplex(List<Expression> Variables, Model model){ ArrayList<Expression> simplexList = new ArrayList<>(); Expression one = makeSymbol("1"); Expression zero= makeSymbol("0"); for(Expression var : Variables){ Expression values = model.getValues(var); //TODO getValues should return the right //Expression rather than a string to be parsed. 
//By the way, that expression should represent a UniSet List<Expression> listOfValues = getElements(values); for (Expression value : listOfValues){ simplexList.add(apply(IF_THEN_ELSE, apply(EQUAL, var, value), one, zero)); } //simplexList.add(apply(IF_THEN_ELSE, apply(EQUAL, var, true ), one, zero)); //simplexList.add(apply(IF_THEN_ELSE, apply(EQUAL, var, false), one, zero)); } Expression result = new DefaultExtensionalUniSet(simplexList); return result; } /** * Assumes that each element of the bound is a factor with the same domain * Normalizes each factor of the bound. In latex notation: * {\phi/sum_{var(\phi)}\phi : \phi in bound} * @param bound * @param theory * @param context * @return bound of normalized factors */ public static Expression normalize(Expression bound, Theory theory, Context context){ List<Expression> listOfBound = ExtensionalSets.getElements(bound); if(listOfBound.size() == 0){ return null; } Expression phi = makeSymbol("phi"); Expression phi1 = listOfBound.get(0); IndexExpressionsSet indices = getIndexExpressionsOfFreeVariablesIn(bound, context); println(indices); Expression noCondition = makeSymbol(true); Expression setOfFactorInstantiations = IntensionalSet.makeMultiSet( indices, phi,//head noCondition); Expression sumOnPhi = apply(SUM, setOfFactorInstantiations); Expression f = apply("/", phi, sumOnPhi); Expression result = applyFunctionToBound(f, phi, bound, theory, context); return result; } /** * Computes the product of each term of a list of bounds * @param theory * @param context * @param listOfBounds * @return bound resulting from the product of bounds */ public static Expression boundProduct(Theory theory, Context context, Expression...listOfBounds){ ArrayList<NullaryFunction<Iterator<Expression>>> iteratorForBoundList = mapIntoArrayList(listOfBounds, bound -> () -> getElements(bound).iterator()); Iterator<ArrayList<Expression>> cartesianProduct = new CartesianProductIterator<Expression>(iteratorForBoundList); ArrayList<Expression> resultList = new ArrayList<>(); for (ArrayList<Expression> element : in(cartesianProduct)){ Expression product = apply("*",element); Expression evaluation = theory.evaluate(product,context); resultList.add(evaluation); } Expression result = new DefaultExtensionalUniSet(resultList); // Updating extreme points result = updateExtremes(result, theory, context); return result; } /*public static Expression boundProduct(Theory theory, Context context, Expression...listOfBounds){ if(listOfBounds.length == 0){ return null; } Expression result= boundProduct (0, theory, context, listOfBounds); return result; } private static Expression boundProduct(int i, Theory theory, Context context, Expression...listOfBounds){ if(listOfBounds.length - 1 == i){ return listOfBounds[i]; } Expression productOfOthers = boundProduct(i + 1, theory, context, listOfBounds); Expression b = listOfBounds[i]; List<Expression> listOfb = ExtensionalSet.getElements(b); List<Expression> listOfProductOfOthers = ExtensionalSet.getElements(productOfOthers); ArrayList<Expression> elements = new ArrayList<>(listOfb.size()*listOfProductOfOthers.size()); for (Expression phi1 : listOfb){ for (Expression phi2 : listOfProductOfOthers){ Expression product = apply("*",phi1,phi2); Expression evaluation = theory.evaluate(product,context); elements.add(evaluation); } } DefaultExtensionalUniSet productBound = new DefaultExtensionalUniSet(elements); //Updating extreme points Expression result = updateExtremes(productBound,theory,context); return result; }*/ public static Expression 
applyFunctionToBound(Expression f, Expression variableName, Expression b, Theory theory, Context context){ ExtensionalSet bAsExtensionalSet = (ExtensionalSet) b; int numberOfExtremes = bAsExtensionalSet.getArguments().size(); ArrayList<Expression> elements = new ArrayList<>(numberOfExtremes); for(Expression phi : ExtensionalSets.getElements(bAsExtensionalSet)){ Expression substitution = f.replaceAllOccurrences(variableName, phi, context); //debugging if (debug) println("evaluating: " + substitution); Expression evaluation = theory.evaluate(substitution, context); // problem in evaluation method... //debugging if (debug) println("result: " + evaluation); elements.add(evaluation); } DefaultExtensionalUniSet fOfb = new DefaultExtensionalUniSet(elements); //Updating extreme points Expression result = updateExtremes(fOfb,theory,context); return result; } /** * Eliminate factors not in Ext(C.Hull(B)) * @param B * @return */ private static Expression updateExtremes(Expression B,Theory theory, Context context){ List<Expression> listOfB = getElements(B); ArrayList<Expression> elements = new ArrayList<>(listOfB.size()); int indexPhi = 0; for(Expression phi : listOfB){ if (isExtremePoint(phi,indexPhi,B,theory,context)){ elements.add(phi); } indexPhi++; } DefaultExtensionalUniSet result = new DefaultExtensionalUniSet(elements); return result; } /** * Checks if \phi is a convex combination of the elements in bound * @param phi * factor * @param bound * @return */ public static boolean isExtremePoint(Expression phi,int indexPhi, Expression bound, Theory theory, Context context){ //TODO Expression boundWithoutPhi = removeNonDestructively(bound, indexPhi); // expensive because it copies the whole list List<Expression> listOfB = getElements(boundWithoutPhi); int n = listOfB.size(); Expression[] c = new Expression[n]; for(int i = 0;i<n;i++){ c[i] = makeSymbol("c" + i); context = context.extendWithSymbolsAndTypes("c" + i,"Real"); } // 0<=ci<=1 ArrayList<Expression> listOfC = new ArrayList<>(listOfB.size()); for(int i = 0;i<n;i++){ Expression ciBetween0And1 = apply(AND,apply(GREATER_THAN_OR_EQUAL_TO,1,c[i]), apply(GREATER_THAN_OR_EQUAL_TO,c[i],0) ); listOfC.add(ciBetween0And1); } Expression allCiBetween0And1 = apply(AND, listOfC); //sum over ci =1 listOfC = new ArrayList<>(Arrays.asList(c)); Expression sumOverCiEqualsOne = apply(EQUAL,1,apply(PLUS,listOfC)); //sum of ci*phi1 = phi ArrayList<Expression> prodciphii = new ArrayList<>(listOfB.size()); int i = 0; for(Expression phii : listOfB){ prodciphii.add(apply(TIMES,phii,c[i])); i++; } Expression convexSum = apply(EQUAL,phi,apply(PLUS, prodciphii)); ArrayList<Expression> listOfCiInReal = new ArrayList<>(listOfB.size()); for(i = 0; i <n; i++){ listOfCiInReal.add(apply(IN,c[i],"Real")); } IndexExpressionsSet thereExistsCiInReal = new ExtensionalIndexExpressionsSet(listOfCiInReal); Expression body = apply(AND, allCiBetween0And1, sumOverCiEqualsOne, convexSum); Expression isExtreme = new DefaultExistentiallyQuantifiedFormula(thereExistsCiInReal,body); if (debug) println(isExtreme); //Expression result = theory.evaluate(isExtreme, context); return true; } public static void main(String[] args) { Theory theory = new CompoundTheory( new EqualityTheory(false, true), new DifferenceArithmeticTheory(false, false), new LinearRealArithmeticTheory(false, false), new TupleTheory(), new PropositionalTheory()); Context context = new TrueContext(theory); context = context.extendWithSymbolsAndTypes("X","Boolean"); context = context.extendWithSymbolsAndTypes("Y","Boolean"); context =
context.extendWithSymbolsAndTypes("A","Boolean"); context = context.extendWithSymbolsAndTypes("B","Boolean"); //Set of numbers Expression one = DefaultSymbol.createSymbol(1); Expression two = DefaultSymbol.createSymbol(2); Expression three = DefaultSymbol.createSymbol(3); Expression setOFNumbers = ExtensionalSets.makeUniSet(one, two, three); //Set of functions Expression phi1 = parse("if X = true then 1 else if Y = true then 2 else 3"); Expression phi2 = parse("if X = true then if Y = true then 4 else 5 else 6"); Expression phi3 = parse("if A = true then 7 else if B = true then 8 else 9"); Expression phi4 = parse("if X = true then 10 else if Y = true then 11 else 12"); Expression setOfFactors = ExtensionalSets.makeUniSet(phi1, phi2, phi3, phi4); Bounds.normalize(setOfFactors, theory, context).toString(); } }
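/*
 * Hypothetical usage sketch (not part of the original sources): it combines two small bounds
 * with Bounds.boundProduct and then normalizes the result, mirroring the setup in Bounds.main
 * above. It assumes the same imports and static imports as Bounds; boundA and boundB are
 * illustrative names only, not expressions taken from the project.
 */
class BoundsUsageSketch {
    public static void main(String[] args) {
        Theory theory = new CompoundTheory(
                new EqualityTheory(false, true),
                new DifferenceArithmeticTheory(false, false),
                new LinearRealArithmeticTheory(false, false),
                new TupleTheory(),
                new PropositionalTheory());
        Context context = new TrueContext(theory);
        context = context.extendWithSymbolsAndTypes("X", "Boolean");
        context = context.extendWithSymbolsAndTypes("Y", "Boolean");
        // Two toy bounds, each represented by the set of its extreme points.
        Expression boundA = ExtensionalSets.makeUniSet(
                parse("if X = true then 1 else 2"),
                parse("if X = true then 3 else 4"));
        Expression boundB = ExtensionalSets.makeUniSet(
                parse("if Y = true then 5 else 6"));
        // Pointwise product of the extreme points, pruned to those that remain extreme.
        Expression product = Bounds.boundProduct(theory, context, boundA, boundB);
        // Normalization of each factor of the resulting bound.
        Expression normalized = Bounds.normalize(product, theory, context);
        println(normalized);
    }
}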
package com.sybit.r750explorer.controller; import com.sybit.r750explorer.exception.FrageException; import com.sybit.r750explorer.exception.FrageNotFoundException; import com.sybit.r750explorer.exception.MailException; import com.sybit.r750explorer.repository.tables.Fragen; import com.sybit.r750explorer.repository.tables.Location; import com.sybit.r750explorer.service.LocationService; import com.sybit.r750explorer.service.MailService; import com.sybit.r750explorer.service.QuizService; import com.sybit.r750explorer.service.ScoreService; import java.io.Serializable; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; @Controller @RequestMapping("/location/{slug}") public class QuizController implements Serializable { private final org.slf4j.Logger log = LoggerFactory.getLogger(this.getClass()); @Autowired private LocationService locationService; @Autowired private QuizService quizService; @Autowired private ScoreService scoreService; @Autowired private MailService mailService; @ModelAttribute("check") public boolean checkLocation(@CookieValue("UUID") String uuid, @PathVariable("slug") String slug) { log.debug("--> checkLocation"); Location loc = locationService.getLocation(slug); List<Location> visited = locationService.getVisitedLocations(uuid); for (Location l : visited) { if (l.getName().equals(loc.getName())) { return false; } } return true; } /** * Code Entry Page * * Method to show code entry page * * @param slug URL-Part of Location * @param model Model to add data to web page * @param attributes * @return */ @RequestMapping(value = "/code") public String code(@PathVariable("slug") String slug, Map<String, Object> model, RedirectAttributes attributes) { log.debug("--> CodePage"); if (!(boolean) model.get("check")) { attributes.addFlashAttribute("message", "Sie wurden auf die Homeseite umgeleitet!"); return "redirect:" + "/"; } Location loc = locationService.getLocation(slug); model.put("location", loc); return "codeproof"; } @RequestMapping(value = "/quiz") public String quiz(@CookieValue("UUID") String uuid, HttpServletRequest request, @RequestParam boolean mail, @RequestParam String code, @PathVariable("slug") String slug, Map<String, Object> model, RedirectAttributes attributes) { log.debug("--> CodePage"); if (!(boolean) model.get("check")) { attributes.addFlashAttribute("message", "Sie wurden auf die Homeseite umgeleitet!"); return "redirect:" + "/"; } Location loc = locationService.getLocation(slug); HttpSession session = request.getSession(); Fragen frage = null; if (code.equalsIgnoreCase(locationService.getLocation(slug).getCode())) { if (mail && session.getAttribute("Location_Hint_report_" + slug) == null) { try { mailService.sendMessage(loc.getName() + ": " + "Code ist nicht auffindbar/lesbar. 
Bitte umgehend neu anbringen!", uuid); } catch (MailException ex) { log.error(ex.getMessage(), ex); } session.setAttribute("Location_Hint_report_" + slug, true); scoreService.newSpielstandEntry(uuid, null, null, "Hinweis", Float.valueOf(5)); } model.put("frage", questionInSession(slug, request)); model.put("location", locationService.getLocation(slug)); model.put("codeCheck", true); return "quiz"; } else { model.put("location", locationService.getLocation(slug)); model.put("codeCheck", false); if (codeEntryCounter(slug, request, uuid)) { model.put("maxEntries", true); } return "codeproof"; } } /** * Code counter * * Method to count entered code of location * * @param slug URL-Part of Location * @param request * @return */ private boolean codeEntryCounter(String slug, HttpServletRequest request, String uuid) { log.debug("--> codeEntryCounter"); boolean entriesFull = false; HttpSession session = request.getSession(); if (session != null) { if (session.getAttribute("Location_" + slug) != null) { String value = session.getAttribute("Location_" + slug).toString(); Integer counter = Integer.valueOf(value); log.debug("LocationCode: " + locationService.getLocation(slug).getCode() + ". Entries: " + counter); counter++; log.debug(counter + " Mal falsch eingegeben"); if (counter >= 10) { log.info("Code Eingabe gesperrt! UUID: " + uuid); entriesFull = true; } session.setAttribute("Location_" + slug, counter); } else { session.setAttribute("Location_" + slug, "0"); } } return entriesFull; } public Fragen questionInSession(String slug, HttpServletRequest request) { HttpSession session = request.getSession(); Fragen frage; try { frage = quizService.getFrageOfLocation(slug); if (session.getAttribute("Location_Quiz_" + slug) != null) { frage = quizService.getFrageOfID(session.getAttribute("Location_Quiz_" + slug).toString()); } else { session.setAttribute("Location_Quiz_" + slug, frage.getId()); } } catch (FrageException e) { frage = null; log.error(e.getMessage()); throw new FrageNotFoundException("Keine Frage zu LocationSlug: " + slug + "gefunden!"); } return frage; } /** * Quiz check * * Method to compare the selected answer with the correct answer of current * question * * @param scoreCookie Cookie-ID of the user * @param antwort Entered answer * @param fragenID Current question * @param slug URL-Part of Location * @param model Model to add data to web page * @param attributes * @return */ @RequestMapping(value = "/quiz/check", method = RequestMethod.POST) public String checkQuiz(@CookieValue("UUID") String scoreCookie, @RequestParam String antwort, @RequestParam String fragenID, @PathVariable("slug") String slug, Map<String, Object> model, RedirectAttributes attributes) { log.debug("--> QuizCheck"); if (!(boolean) model.get("check")) { attributes.addFlashAttribute("message", "Sie wurden auf die Homeseite umgeleitet!"); return "redirect:" + "/"; } Fragen frage = quizService.getFrageOfID(fragenID); Float score; if (frage.getLoesung().equals(Float.valueOf(antwort))) { model.put("loesung", true); score = Float.valueOf(10); } else { model.put("loesung", false); score = Float.valueOf(1); } scoreService.newSpielstandEntry(scoreCookie, locationService.getLocation(slug), fragenID, antwort, score); model.put("loesungText", frage.getLoesungText()); model.put("location", locationService.getLocation(slug)); return "quiz-check"; } }
package com.vampirehemophile.ghosts.gamestates; import javax.swing.JPanel; import javax.imageio.ImageIO; import javax.swing.event.MouseInputListener; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Color; import java.awt.image.BufferedImage; import java.awt.Dimension; import java.awt.event.MouseEvent; import java.io.File; import java.io.IOException; import java.util.Queue; import java.util.LinkedList; import com.vampirehemophile.ghosts.managers.BoardManager; import com.vampirehemophile.ghosts.entities.*; import com.vampirehemophile.ghosts.math.Coordinates; /** Main game state. */ public class PlayState extends State implements MouseInputListener { /** game state. */ private enum GameState { SETUP, PLAY } /** main game state's panel. */ @SuppressWarnings("serial") private class PlayPanel extends JPanel { // images private BufferedImage lightTile; private BufferedImage darkTile; private BufferedImage whiteGoodPawn; private BufferedImage whiteEvilPawn; private BufferedImage whiteNeutralPawn; private BufferedImage blackGoodPawn; private BufferedImage blackEvilPawn; private BufferedImage blackNeutralPawn; // mouse event queue public Queue<MouseEvent> eventQueue; private PlayState parent; private Board board; /** Constructs a PlayPanel object. */ public PlayPanel() throws IOException { super(); setPreferredSize(new Dimension(600, 620)); eventQueue = new LinkedList<>(); parent = PlayState.this; board = parent.bm.board(); lightTile = ImageIO.read(State.getResource("/images/lighttile.png")); darkTile = ImageIO.read(State.getResource("/images/darktile.png")); whiteGoodPawn = ImageIO.read(State.getResource("/images/white/goodpawn.png")); whiteEvilPawn = ImageIO.read(State.getResource("/images/white/evilpawn.png")); whiteNeutralPawn = ImageIO.read(State.getResource("/images/white/neutralpawn.png")); blackGoodPawn = ImageIO.read(State.getResource("/images/black/goodpawn.png")); blackEvilPawn = ImageIO.read(State.getResource("/images/black/evilpawn.png")); blackNeutralPawn = ImageIO.read(State.getResource("/images/black/neutralpawn.png")); } /** {@inheritDoc} */ @Override protected void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2d = (Graphics2D) g; drawBoard(g2d); // Process mouse events MouseEvent e = eventQueue.poll(); String message = null; while (e != null) { switch (e.getID()) { case MouseEvent.MOUSE_PRESSED: break; case MouseEvent.MOUSE_RELEASED: break; case MouseEvent.MOUSE_CLICKED: break; case MouseEvent.MOUSE_ENTERED: break; case MouseEvent.MOUSE_EXITED: break; case MouseEvent.MOUSE_DRAGGED: break; case MouseEvent.MOUSE_MOVED: break; } drawString(message, g2d); e = eventQueue.poll(); } drawPawns(g2d); } /** * Draws the board. * * @param g2d the graphics object. */ protected void drawBoard(Graphics2D g2d) { boolean dark = true; for (int i = 0; i < 6; i++) { for (int j = 0; j < 6; j++) { if (dark) { g2d.drawImage(darkTile, i*100, j*100, 100, 100, null); } else { g2d.drawImage(lightTile, i*100, j*100, 100, 100, null); } dark = !dark; } dark = !dark; } } /** * Draws the pawn set on the board. * * @param g2d the graphics object. 
*/ protected void drawPawns(Graphics2D g2d) { Pawn pawn; for (int x = 0; x < board.size(); x++) { for (int y = 0; y < board.size(); y++) { pawn = board.at(new Coordinates(x, y, board.size())); if (pawn == null) { } else if (pawn.player().equals(white)) { switch (pawn.type()) { case GOOD: g2d.drawImage(whiteGoodPawn, x*100 + 25 + 12, y*100 + 25, 25, 50, null); break; case EVIL: g2d.drawImage(whiteEvilPawn, x*100 + 25 + 12, y*100 + 25, 25, 50, null); break; case UNKNOWN: g2d.drawImage(whiteNeutralPawn, x*100 + 25 + 12, y*100 + 25, 25, 50, null); break; } } else if (pawn.player().equals(black)) { switch (pawn.type()) { case GOOD: g2d.drawImage(blackGoodPawn, x*100 + 25 + 12, y*100 + 25, 25, 50, null); break; case EVIL: g2d.drawImage(blackEvilPawn, x*100 + 25 + 12, y*100 + 25, 25, 50, null); break; case UNKNOWN: g2d.drawImage(blackNeutralPawn, x*100 + 25 + 12, y*100 + 25, 25, 50, null); break; } } } } } /** * Draw a message. * * @param string the message. * @param g2d the graphics object. */ protected void drawString(String string, Graphics2D g2d) { g2d.drawString(string != null ? string : "", 10, 615); } } /** states panel. */ private PlayPanel panel; private GameState state; private BoardManager bm; private Player white; private Player black; private Player current; /** Constructs a PlayState object. */ public PlayState() { super(); state = GameState.SETUP; white = new Player(); black = new Player(); bm = new BoardManager(white, black); Board board = bm.board(); try { panel = new PlayPanel(); panel.addMouseListener(this); panel.addMouseMotionListener(this); } catch (IOException e) { System.out.println(e.getMessage()); System.exit(1); } } /** {@inheritDoc} */ @Override public JPanel render() { return panel; } // Mouse event handling /** * Push mouse event to the panel event queue. * * @param e an event. */ public void pushEvent(MouseEvent e) { panel.eventQueue.add(e); panel.repaint(); } /** {@inheritDoc} */ @Override public void mouseDragged(MouseEvent e) { pushEvent(e); } /** {@inheritDoc} */ @Override public void mouseMoved(MouseEvent e) { pushEvent(e); } /** {@inheritDoc} */ @Override public void mouseClicked (MouseEvent e) { pushEvent(e); } /** {@inheritDoc} */ @Override public void mouseEntered (MouseEvent e) { pushEvent(e); } /** {@inheritDoc} */ @Override public void mouseExited (MouseEvent e) { pushEvent(e); } /** {@inheritDoc} */ @Override public void mousePressed (MouseEvent e) { pushEvent(e); } /** {@inheritDoc} */ @Override public void mouseReleased (MouseEvent e) { pushEvent(e); } }
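/*
 * Hypothetical wiring sketch (not from the original sources): PlayState.render() returns a
 * JPanel, so the state can be displayed by dropping that panel into a Swing frame. The real
 * application presumably drives states through its own manager; the frame title is illustrative.
 */
class PlayStateDemo {
    public static void main(String[] args) {
        javax.swing.SwingUtilities.invokeLater(() -> {
            javax.swing.JFrame frame = new javax.swing.JFrame("Ghosts - PlayState demo");
            frame.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
            // render() hands back the PlayPanel that already has its mouse listeners attached.
            frame.setContentPane(new PlayState().render());
            frame.pack();
            frame.setVisible(true);
        });
    }
}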
package com.wizzardo.http.framework.di; import com.wizzardo.tools.cache.Cache; import com.wizzardo.tools.cache.Computable; import java.lang.annotation.Annotation; import java.lang.reflect.Modifier; import java.util.HashMap; import java.util.List; import java.util.Map; public class DependencyFactory { private List<Class> classes; private Map<Class, Class> mapping = new HashMap<Class, Class>(); @SuppressWarnings("unchecked") private Cache<Class, Dependency> dependencies = new Cache<>(0, new Computable<Class, Dependency>() { @Override public Dependency compute(Class clazz) { Injectable injectable = (Injectable) getAnnotation(clazz, Injectable.class); if (injectable != null) return injectable.scope().createDependency(clazz); if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) { Class implementation = mapping.get(clazz); if (implementation == null) { for (Class cl : classes) { if (clazz.isAssignableFrom(cl) && !Modifier.isInterface(cl.getModifiers()) && !Modifier.isAbstract(cl.getModifiers()) && (injectable = (Injectable) getAnnotation(cl, Injectable.class)) != null ) { if (implementation != null) { throw new IllegalStateException("can't resolve dependency '" + clazz + "'. Found more than one implementation: " + implementation + " and " + cl); } implementation = cl; } } } if (implementation != null) { if (injectable == null) injectable = (Injectable) getAnnotation(implementation, Injectable.class); if (injectable != null) return injectable.scope().createDependency(implementation); else return new PrototypeDependency(implementation); } } if (Service.class.isAssignableFrom(clazz)) { return new SingletonDependency(clazz); } throw new IllegalStateException("can't create dependency-holder for class: " + clazz); } }); static Annotation getAnnotation(Class clazz, Class annotation) { while (clazz != null) { Annotation a = clazz.getAnnotation(annotation); if (a != null) { return a; } for (Class implemented : clazz.getInterfaces()) { a = getAnnotation(implemented, annotation); if (a != null) return a; } clazz = clazz.getSuperclass(); } return null; } private static class DependencyFactoryHolder { private static final DependencyFactory instance = new DependencyFactory(); } public static <T> T getDependency(Class<T> clazz) { return DependencyFactoryHolder.instance.resolve(clazz); } public static <T> T get(Class<T> clazz) { return DependencyFactoryHolder.instance.resolve(clazz); } public static DependencyFactory get() { return DependencyFactoryHolder.instance; } @SuppressWarnings("unchecked") private <T> T resolve(Class<T> clazz) { return (T) dependencies.get(clazz).get(); } public void setClasses(List<Class> classes) { this.classes = classes; } public void bind(Class abstractClass, Class implementation) { mapping.put(abstractClass, implementation); dependencies.get(abstractClass); } public <T> void register(Class<T> clazz, Dependency<T> dependency) { dependencies.put(clazz, dependency); } public boolean contains(Class clazz) { return dependencies.contains(clazz); } }
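/*
 * Hypothetical usage sketch (not part of the original file): it binds an interface to an
 * implementation and resolves it through the factory. FooService and FooServiceImpl are
 * illustrative names; resolution of the bound pair goes through the cache's Computable,
 * which falls back to a PrototypeDependency when no @Injectable annotation is present.
 */
class DependencyFactoryUsageSketch {
    public interface FooService {
        String greet();
    }

    public static class FooServiceImpl implements FooService {
        @Override
        public String greet() {
            return "hello";
        }
    }

    public static void main(String[] args) {
        // Register the abstract-to-concrete mapping, then resolve the abstract type.
        DependencyFactory.get().bind(FooService.class, FooServiceImpl.class);
        FooService service = DependencyFactory.getDependency(FooService.class);
        System.out.println(service.greet());
    }
}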
package crazypants.enderio.conduit.facade; import net.minecraft.block.Block; import net.minecraft.client.renderer.RenderBlocks; import net.minecraft.init.Blocks; import net.minecraft.item.ItemStack; import net.minecraftforge.client.IItemRenderer; import org.lwjgl.opengl.GL11; import crazypants.enderio.EnderIO; import crazypants.enderio.machine.painter.PainterUtil; import crazypants.render.RenderUtil; public class FacadeRenderer implements IItemRenderer { @Override public boolean handleRenderType(ItemStack item, ItemRenderType type) { return type == ItemRenderType.ENTITY || type == ItemRenderType.EQUIPPED || type == ItemRenderType.INVENTORY || type == ItemRenderType.EQUIPPED_FIRST_PERSON; } @Override public boolean shouldUseRenderHelper(ItemRenderType type, ItemStack item, ItemRendererHelper helper) { return true; } @Override public void renderItem(ItemRenderType type, ItemStack item, Object... data) { if(type == ItemRenderType.INVENTORY) { RenderBlocks renderBlocks = (RenderBlocks) data[0]; renderToInventory(item, renderBlocks); } else if(type == ItemRenderType.EQUIPPED || type == ItemRenderType.EQUIPPED_FIRST_PERSON) { renderEquipped(item, (RenderBlocks) data[0]); } else if(type == ItemRenderType.ENTITY) { renderEntity(item, (RenderBlocks) data[0]); } else { System.out.println("FacadeRenderer.renderItem: Unsupported render type"); } } private void renderEntity(ItemStack item, RenderBlocks renderBlocks) { GL11.glPushMatrix(); GL11.glScalef(0.5f, 0.5f, 0.5f); renderToInventory(item, renderBlocks); GL11.glPopMatrix(); } private void renderEquipped(ItemStack item, RenderBlocks renderBlocks) { renderToInventory(item, renderBlocks); } private void renderToInventory(ItemStack item, RenderBlocks renderBlocks) { Block block = PainterUtil.getSourceBlock(item); if(block != null) { // Render the facade block RenderUtil.bindBlockTexture(); if("appeng.block.solids.BlockSkyStone".equals(block.getClass().getName())) { //Yes, this is a horrible hack, but stumped as to why it is rendered invisible if this isn't done. renderBlocks.setOverrideBlockTexture(block.getIcon(0, PainterUtil.getSourceBlockMetadata(item))); renderBlocks.renderBlockAsItem(Blocks.stone, 0, 1.0F); } else { renderBlocks.renderBlockAsItem(block, PainterUtil.getSourceBlockMetadata(item), 1.0F); } // then the 'overlay' that marks it as a facade GL11.glDepthFunc(GL11.GL_LEQUAL); GL11.glDisable(GL11.GL_LIGHTING); GL11.glDepthMask(false); GL11.glEnable(GL11.GL_BLEND); GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA); GL11.glEnable(GL11.GL_POLYGON_OFFSET_FILL); GL11.glPolygonOffset(-1.0f, -1.0f); RenderUtil.bindItemTexture(); renderBlocks.setOverrideBlockTexture(EnderIO.itemConduitFacade.getOverlayIcon()); renderBlocks.renderBlockAsItem(Blocks.stone, item.getItemDamage(), 1.0F); GL11.glDisable(GL11.GL_POLYGON_OFFSET_FILL); renderBlocks.clearOverrideBlockTexture(); GL11.glDisable(GL11.GL_BLEND); GL11.glDepthMask(true); GL11.glEnable(GL11.GL_LIGHTING); GL11.glDepthFunc(GL11.GL_LEQUAL); } else { renderBlocks.setOverrideBlockTexture(EnderIO.itemConduitFacade.getIconFromDamage(0)); renderBlocks.renderBlockAsItem(Blocks.stone, 0, 1.0F); renderBlocks.clearOverrideBlockTexture(); } } }
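/*
 * Hypothetical registration sketch (client-proxy code, not part of this file): an IItemRenderer
 * only takes effect once registered with MinecraftForgeClient. It is assumed here that
 * EnderIO.itemConduitFacade is the item this renderer targets and that the call runs on the
 * client side during mod initialization.
 */
class FacadeRendererRegistration {
    public static void registerClientRenderer() {
        net.minecraftforge.client.MinecraftForgeClient.registerItemRenderer(
                EnderIO.itemConduitFacade, new FacadeRenderer());
    }
}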
package edu.wustl.catissuecore.bizlogic; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.ListIterator; import java.util.Set; import java.util.Vector; import edu.wustl.catissuecore.action.DomainObjectListAction; import edu.wustl.catissuecore.dao.DAO; import edu.wustl.catissuecore.domain.CancerResearchGroup; import edu.wustl.catissuecore.domain.Department; import edu.wustl.catissuecore.domain.Institution; import edu.wustl.catissuecore.domain.User; import edu.wustl.catissuecore.util.EmailHandler; import edu.wustl.catissuecore.util.PasswordManager; import edu.wustl.catissuecore.util.Roles; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SecurityDataBean; import edu.wustl.common.beans.SessionDataBean; import edu.wustl.common.domain.AbstractDomainObject; import edu.wustl.common.security.SecurityManager; import edu.wustl.common.security.exceptions.SMException; import edu.wustl.common.security.exceptions.UserNotAuthorizedException; import edu.wustl.common.util.dbManager.DAOException; import edu.wustl.common.util.global.ApplicationProperties; import edu.wustl.common.util.global.Validator; import edu.wustl.common.util.logger.Logger; import gov.nih.nci.security.authorization.domainobjects.Role; /** * UserBizLogic is used to add user information into the database using Hibernate. * @author kapil_kaveeshwar */ public class UserBizLogic extends DefaultBizLogic { /** * Saves the user object in the database. * @param obj The user object to be saved. * @param session The session in which the object is saved. * @throws DAOException */ protected void insert(Object obj, DAO dao, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { User user = (User) obj; gov.nih.nci.security.authorization.domainobjects.User csmUser = new gov.nih.nci.security.authorization.domainobjects.User(); try { List list = dao.retrieve(Department.class.getName(),Constants.SYSTEM_IDENTIFIER, user.getDepartment().getSystemIdentifier()); Department department = null; if (list.size() != 0) { department = (Department) list.get(0); } list = dao.retrieve(Institution.class.getName(),Constants.SYSTEM_IDENTIFIER, user.getInstitution().getSystemIdentifier()); Institution institution = null; if (list.size() != 0) { institution = (Institution) list.get(0); } list = dao.retrieve(CancerResearchGroup.class.getName(),Constants.SYSTEM_IDENTIFIER, user.getCancerResearchGroup().getSystemIdentifier()); CancerResearchGroup cancerResearchGroup = null; if (list.size() != 0) { cancerResearchGroup = (CancerResearchGroup) list.get(0); } user.setDepartment(department); user.setInstitution(institution); user.setCancerResearchGroup(cancerResearchGroup); // If the page is of signup user don't create the csm user. 
if (user.getPageOf().equals(Constants.PAGEOF_SIGNUP) == false) { csmUser.setLoginName(user.getLoginName()); csmUser.setLastName(user.getLastName()); csmUser.setFirstName(user.getFirstName()); csmUser.setEmailId(user.getEmailAddress()); csmUser.setStartDate(user.getStartDate()); csmUser.setPassword(PasswordManager.encode(PasswordManager.generatePassword())); SecurityManager.getInstance(UserBizLogic.class).createUser(csmUser); if (user.getRoleId() != null) { SecurityManager.getInstance(UserBizLogic.class) .assignRoleToUser(csmUser.getUserId().toString(),user.getRoleId()); } user.setCsmUserId(csmUser.getUserId()); user.setPassword(csmUser.getPassword()); } // Create address and the user in catissue tables. dao.insert(user.getAddress(), sessionDataBean, true, false); dao.insert(user, sessionDataBean, true, false); Set protectionObjects = new HashSet(); protectionObjects.add(user); EmailHandler emailHandler = new EmailHandler(); // Send the user registration email to user and the administrator. if (Constants.PAGEOF_SIGNUP.equals(user.getPageOf())) { SecurityManager.getInstance(this.getClass()).insertAuthorizationData( null, protectionObjects, null); emailHandler.sendUserSignUpEmail(user); } else // Send the user creation email to user and the administrator. { SecurityManager.getInstance(this.getClass()).insertAuthorizationData( getAuthorizationData(user), protectionObjects, null); emailHandler.sendApprovalEmail(user); } } catch(DAOException daoExp) { Logger.out.debug(daoExp.getMessage(), daoExp); deleteCSMUser(csmUser); throw daoExp; } catch (SMException e) { // added to format constraint violation message deleteCSMUser(csmUser); throw handleSMException(e); } } /** * Deletes the csm user from the csm user table. * @param csmUser The csm user to be deleted. * @throws DAOException */ private void deleteCSMUser(gov.nih.nci.security.authorization.domainobjects.User csmUser) throws DAOException { try { if (csmUser.getUserId() != null) { SecurityManager.getInstance(ApproveUserBizLogic.class) .removeUser(csmUser.getUserId().toString()); } } catch(SMException smExp) { throw handleSMException(smExp); } } /** * This method returns collection of UserGroupRoleProtectionGroup objects that specifies the * user group protection group linkage through a role. It also specifies the groups the protection * elements returned by this class should be added to. * @return */ private Vector getAuthorizationData(AbstractDomainObject obj) throws SMException { Logger.out.debug("In getAuthorizationData method"); Vector authorizationData = new Vector(); Set group = new HashSet(); User aUser = (User)obj; String userId = String.valueOf(aUser.getCsmUserId()); gov.nih.nci.security.authorization.domainobjects.User user = SecurityManager.getInstance(this.getClass()).getUserById(userId); Logger.out.debug(" User: "+user.getLoginName()); group.add(user); // Protection group of User String protectionGroupName = Constants.getUserPGName(aUser.getSystemIdentifier()); SecurityDataBean userGroupRoleProtectionGroupBean = new SecurityDataBean(); userGroupRoleProtectionGroupBean.setUser(userId); userGroupRoleProtectionGroupBean.setRoleName(Roles.UPDATE_ONLY); userGroupRoleProtectionGroupBean.setGroupName(Constants.getUserGroupName(aUser.getSystemIdentifier())); userGroupRoleProtectionGroupBean.setProtectionGroupName(protectionGroupName); userGroupRoleProtectionGroupBean.setGroup(group); authorizationData.add(userGroupRoleProtectionGroupBean); Logger.out.debug(authorizationData.toString()); return authorizationData; } /** * Updates the persistent object in the database.
* @param obj The object to be updated. * @param session The session in which the object is saved. * @throws DAOException */ protected void update(DAO dao, Object obj, Object oldObj, SessionDataBean sessionDataBean) throws DAOException, UserNotAuthorizedException { User user = (User) obj; try { // Get the csm userId if present. String csmUserId = null; if (user.getCsmUserId() != null) { csmUserId = user.getCsmUserId().toString(); } gov.nih.nci.security.authorization.domainobjects.User csmUser = SecurityManager .getInstance(DomainObjectListAction.class).getUserById(csmUserId); // If the page is of change password, // update the password of the user in csm and catissue tables. if (user.getPageOf().equals(Constants.PAGEOF_CHANGE_PASSWORD)) { if (!user.getOldPassword().equals(PasswordManager.decode(csmUser.getPassword()))) { throw new DAOException(ApplicationProperties.getValue("errors.oldPassword.wrong")); } csmUser.setPassword(PasswordManager.encode(user.getPassword())); user.setPassword(csmUser.getPassword()); } else { csmUser.setLoginName(user.getLoginName()); csmUser.setLastName(user.getLastName()); csmUser.setFirstName(user.getFirstName()); csmUser.setEmailId(user.getEmailAddress()); // Assign Role only if the page is of Administrative user edit. if ((Constants.PAGEOF_USER_PROFILE.equals(user.getPageOf()) == false) && (Constants.PAGEOF_CHANGE_PASSWORD.equals(user.getPageOf()) == false)) { SecurityManager.getInstance(UserBizLogic.class).assignRoleToUser( csmUser.getUserId().toString(), user.getRoleId()); } dao.update(user.getAddress(), sessionDataBean, true, false, false); //Audit of user address. User oldUser = (User) oldObj; dao.audit(user.getAddress(), oldUser.getAddress(),sessionDataBean,true); } // Modify the csm user. SecurityManager.getInstance(UserBizLogic.class).modifyUser(csmUser); dao.update(user, sessionDataBean, true, true, true); //Audit of user. dao.audit(obj, oldObj,sessionDataBean,true); if (Constants.ACTIVITY_STATUS_ACTIVE.equals(user.getActivityStatus())) { Set protectionObjects=new HashSet(); protectionObjects.add(user); SecurityManager.getInstance(this.getClass()).insertAuthorizationData( getAuthorizationData(user), protectionObjects, null); } } catch (SMException e) { throw handleSMException(e); } } /** * Returns the list of NameValueBeans with name as "LastName,Firstname" * and value as systemtIdentifier, of all users who are not disabled. * @return the list of NameValueBeans with name as "LastName,Firstname" * and value as systemtIdentifier, of all users who are not disabled. * @throws DAOException */ public Vector getUsers() throws DAOException { String sourceObjectName = User.class.getName(); String[] selectColumnName = null; String[] whereColumnName = {Constants.ACTIVITY_STATUS,Constants.ACTIVITY_STATUS}; String[] whereColumnCondition = {"=","="}; Object[] whereColumnValue = {Constants.ACTIVITY_STATUS_ACTIVE,Constants.ACTIVITY_STATUS_CLOSED }; String joinCondition = Constants.OR_JOIN_CONDITION ; //Retrieve the users whose activity status is not disabled. List users = retrieve(sourceObjectName, selectColumnName, whereColumnName, whereColumnCondition, whereColumnValue, joinCondition); Vector nameValuePairs = new Vector(); nameValuePairs.add(new NameValueBean(Constants.SELECT_OPTION, String.valueOf(Constants.SELECT_OPTION_VALUE))); // If the list of users retrieved is not empty. if (users.isEmpty() == false) { // Creating name value beans. 
for (int i = 0; i < users.size(); i++) { User user = (User) users.get(i); NameValueBean nameValueBean = new NameValueBean(); nameValueBean.setName(user.getLastName() + ", " + user.getFirstName()); nameValueBean.setValue(String.valueOf(user .getSystemIdentifier())); Logger.out.debug(nameValueBean.toString() + " : " + user.getActivityStatus() ); nameValuePairs.add(nameValueBean); } } Collections.sort(nameValuePairs) ; return nameValuePairs; } /** * Returns the list of NameValueBeans with name as "LastName,Firstname" * and value as systemtIdentifier, of all users who are not disabled. * @return the list of NameValueBeans with name as "LastName,Firstname" * and value as systemtIdentifier, of all users who are not disabled. * @throws DAOException */ public Vector getCSMUsers() throws DAOException, SMException { //Retrieve the users whose activity status is not disabled. List users = SecurityManager.getInstance(UserBizLogic.class).getUsers(); Vector nameValuePairs = new Vector(); nameValuePairs.add(new NameValueBean(Constants.SELECT_OPTION, String.valueOf(Constants.SELECT_OPTION_VALUE))); // If the list of users retrieved is not empty. if (users.isEmpty() == false) { // Creating name value beans. for (int i = 0; i < users.size(); i++) { gov.nih.nci.security.authorization.domainobjects.User user = (gov.nih.nci.security.authorization.domainobjects.User) users.get(i); NameValueBean nameValueBean = new NameValueBean(); nameValueBean.setName(user.getLastName() + ", " + user.getFirstName()); nameValueBean.setValue(String.valueOf(user .getUserId())); Logger.out.debug(nameValueBean.toString()); nameValuePairs.add(nameValueBean); } } Collections.sort(nameValuePairs) ; return nameValuePairs; } /** * Returns a list of users according to the column name and value. * @param colName column name on the basis of which the user list is to be retrieved. * @param colValue Value for the column name. * @throws DAOException */ public List retrieve(String className, String colName, Object colValue) throws DAOException { List userList = null; try { // Get the caTISSUE user. userList = super.retrieve(className, colName, colValue); edu.wustl.catissuecore.domain.User appUser = null; if (!userList.isEmpty()) { appUser = (edu.wustl.catissuecore.domain.User) userList.get(0); if (appUser.getCsmUserId() != null) { //Get the role of the user. Role role = SecurityManager.getInstance(UserBizLogic.class) .getUserRole(appUser.getCsmUserId().longValue()); if (role != null) { appUser.setRoleId(role.getId().toString()); } } } } catch (SMException e) { throw handleSMException(e); } return userList; } /** * Retrieves and sends the login details email to the user whose email address is passed * else returns the error key in case of an error. * @param emailAddress the email address of the user whose password is to be sent. * @return the error key in case of an error. * @throws DAOException */ public String sendForgotPassword(String emailAddress) throws DAOException { String statusMessageKey = null; List list = retrieve(User.class.getName(), "emailAddress", emailAddress); if (!list.isEmpty()) { User user = (User) list.get(0); if (user.getActivityStatus().equals(Constants.ACTIVITY_STATUS_ACTIVE)) { EmailHandler emailHandler = new EmailHandler(); //Send the login details email to the user. boolean emailStatus = emailHandler.sendLoginDetailsEmail(user, null); if (emailStatus) { statusMessageKey = "password.send.success"; } else { statusMessageKey = "password.send.failure"; } } else { //Error key if the user is not active. 
statusMessageKey = "errors.forgotpassword.user.notApproved"; } } else { // Error key if the user is not present. statusMessageKey = "errors.forgotpassword.user.unknown"; } return statusMessageKey; } /** * Overriding the parent class's method to validate the enumerated attribute values */ protected boolean validate(Object obj, DAO dao, String operation) throws DAOException { User user = (User)obj; if (Constants.PAGEOF_CHANGE_PASSWORD.equals(user.getPageOf()) == false) { if(!Validator.isEnumeratedValue(Constants.STATEARRAY,user.getAddress().getState())) { throw new DAOException(ApplicationProperties.getValue("state.errMsg")); } if(!Validator.isEnumeratedValue(Constants.COUNTRYARRAY,user.getAddress().getCountry())) { throw new DAOException(ApplicationProperties.getValue("country.errMsg")); } if(Constants.PAGEOF_USER_ADMIN.equals(user.getPageOf())) { try { if(!Validator.isEnumeratedValue(getRoles(),user.getRoleId())) { throw new DAOException(ApplicationProperties.getValue("user.role.errMsg")); } } catch(SMException e) { throw handleSMException(e); } if(operation.equals(Constants.ADD)) { if(!Constants.ACTIVITY_STATUS_ACTIVE.equals(user.getActivityStatus())) { throw new DAOException(ApplicationProperties.getValue("activityStatus.active.errMsg")); } } else { if(!Validator.isEnumeratedValue(Constants.USER_ACTIVITY_STATUS_VALUES,user.getActivityStatus())) { throw new DAOException(ApplicationProperties.getValue("activityStatus.errMsg")); } } } } return true; } /** * Returns a list of all roles that can be assigned to a user. * @return a list of all roles that can be assigned to a user. * @throws SMException */ private List getRoles() throws SMException { //Sets the roleList attribute to be used in the Add/Edit User Page. Vector roleList = SecurityManager.getInstance(UserBizLogic.class).getRoles(); List roleNameValueBeanList = new ArrayList(); NameValueBean nameValueBean = new NameValueBean(); nameValueBean.setName(Constants.SELECT_OPTION); nameValueBean.setValue("-1"); roleNameValueBeanList.add(nameValueBean); ListIterator iterator = roleList.listIterator(); while (iterator.hasNext()) { Role role = (Role) iterator.next(); nameValueBean = new NameValueBean(); nameValueBean.setName(role.getName()); nameValueBean.setValue(String.valueOf(role.getId())); roleNameValueBeanList.add(nameValueBean); } return roleNameValueBeanList; } // //method to return a comma seperated list of emails of administrators of a particular institute // private String getInstitutionAdmins(Long instID) throws DAOException,SMException // String retStr=""; // String[] userEmail; // Long[] csmAdminIDs = SecurityManager.getInstance(UserBizLogic.class).getAllAdministrators() ; // if (csmAdminIDs != null ) // for(int cnt=0;cnt<csmAdminIDs.length ;cnt++ ) // String sourceObjectName = User.class.getName(); // String[] selectColumnName = null; // String[] whereColumnName = {"institution","csmUserId"}; // String[] whereColumnCondition = {"=","="}; // Object[] whereColumnValue = {instID, csmAdminIDs[cnt] }; // String joinCondition = Constants.AND_JOIN_CONDITION; // //Retrieve the users for given institution and who are administrators. // List users = retrieve(sourceObjectName, selectColumnName, whereColumnName, // whereColumnCondition, whereColumnValue, joinCondition); // if(!users.isEmpty() ) // User adminUser = (User)users.get(0); // retStr = retStr + "," + adminUser.getEmailAddress(); // Logger.out.debug(retStr); // retStr = retStr.substring(retStr.indexOf(",")+1 ); // Logger.out.debug(retStr); // return retStr; }
package crazypants.enderio.machine; import java.util.EnumMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import net.minecraft.block.Block; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.ISidedInventory; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; import net.minecraft.world.World; import net.minecraftforge.common.util.ForgeDirection; import buildcraft.api.power.PowerHandler; import buildcraft.api.power.PowerHandler.PowerReceiver; import buildcraft.api.power.PowerHandler.Type; import crazypants.enderio.EnderIO; import crazypants.enderio.TileEntityEio; import crazypants.enderio.power.Capacitors; import crazypants.enderio.power.ICapacitor; import crazypants.enderio.power.IInternalPowerReceptor; import crazypants.enderio.power.PowerHandlerUtil; import crazypants.util.BlockCoord; import crazypants.util.ItemUtil; import crazypants.vecmath.VecmathUtil; public abstract class AbstractMachineEntity extends TileEntityEio implements ISidedInventory, IInternalPowerReceptor, IMachine, IRedstoneModeControlable { public short facing; // Client sync monitoring protected int ticksSinceSync = -1; protected boolean forceClientUpdate = true; protected boolean lastActive; protected float lastSyncPowerStored = -1; // Power protected Capacitors capacitorType; protected float storedEnergy; protected ItemStack[] inventory; protected final SlotDefinition slotDefinition; protected PowerHandler powerHandler; protected RedstoneControlMode redstoneControlMode; protected boolean redstoneCheckPassed; private boolean redstoneStateDirty = true; protected Map<ForgeDirection, IoMode> faceModes; private int[] allSlots; public AbstractMachineEntity(SlotDefinition slotDefinition, Type powerType) { this.slotDefinition = slotDefinition; facing = 3; capacitorType = Capacitors.BASIC_CAPACITOR; powerHandler = PowerHandlerUtil.createHandler(capacitorType.capacitor, this, powerType); inventory = new ItemStack[slotDefinition.getNumSlots()]; redstoneControlMode = RedstoneControlMode.IGNORE; allSlots = new int[slotDefinition.getNumSlots()]; for(int i=0;i<allSlots.length;i++) { allSlots[i] = i; } } public IoMode toggleIoModeForFace(ForgeDirection faceHit) { IoMode curMode = getIoMode(faceHit); IoMode mode = curMode.next(); while(!supportsMode(faceHit, mode)) { mode = mode.next(); } setIoMode(faceHit, mode); return mode; } public boolean supportsMode(ForgeDirection faceHit, IoMode mode) { return true; } public void setIoMode(ForgeDirection faceHit, IoMode mode) { if(mode == IoMode.NONE && faceModes == null) { return; } if(faceModes == null) { faceModes = new EnumMap<ForgeDirection, IoMode>(ForgeDirection.class); } faceModes.put(faceHit, mode); } public IoMode getIoMode(ForgeDirection face) { if(faceModes == null) { return IoMode.NONE; } IoMode res = faceModes.get(face); if(res == null) { return IoMode.NONE; } return res; } public BlockCoord getLocation() { return new BlockCoord(this); } public SlotDefinition getSlotDefinition() { return slotDefinition; } public boolean isValidUpgrade(ItemStack itemstack) { for (int i = slotDefinition.getMinUpgradeSlot(); i <= slotDefinition.getMaxUpgradeSlot(); i++) { if(isItemValidForSlot(i, itemstack)) { return true; } } return false; } public boolean isValidInput(ItemStack itemstack) { for (int i = slotDefinition.getMinInputSlot(); i <= slotDefinition.getMaxInputSlot(); i++) { if(isItemValidForSlot(i, itemstack)) { return true; } } 
return false; } public boolean isValidOutput(ItemStack itemstack) { for (int i = slotDefinition.getMinOutputSlot(); i <= slotDefinition.getMaxOutputSlot(); i++) { if(isItemValidForSlot(i, itemstack)) { return true; } } return false; } @Override public final boolean isItemValidForSlot(int i, ItemStack itemstack) { if(slotDefinition.isUpgradeSlot(i)) { return itemstack != null && itemstack.getItem() == EnderIO.itemBasicCapacitor && itemstack.getItemDamage() > 0; } return isMachineItemValidForSlot(i, itemstack); } protected abstract boolean isMachineItemValidForSlot(int i, ItemStack itemstack); public AbstractMachineEntity(SlotDefinition slotDefinition) { this(slotDefinition, Type.MACHINE); } @Override public RedstoneControlMode getRedstoneControlMode() { return redstoneControlMode; } @Override public void setRedstoneControlMode(RedstoneControlMode redstoneControlMode) { this.redstoneControlMode = redstoneControlMode; redstoneStateDirty = true; } public short getFacing() { return facing; } public void setFacing(short facing) { this.facing = facing; } public abstract boolean isActive(); public abstract float getProgress(); public int getProgressScaled(int scale) { int result = (int) (getProgress() * scale); return result; } public boolean hasPower() { return storedEnergy > 0; } public ICapacitor getCapacitor() { return capacitorType.capacitor; } public int getEnergyStoredScaled(int scale) { // NB: called on the client so can't use the power provider return VecmathUtil.clamp(Math.round(scale * (storedEnergy / capacitorType.capacitor.getMaxEnergyStored())), 0, scale); } public float getEnergyStored() { return storedEnergy; } public void setCapacitor(Capacitors capacitorType) { this.capacitorType = capacitorType; PowerHandlerUtil.configure(powerHandler, capacitorType.capacitor); forceClientUpdate = true; } @Override public void doWork(PowerHandler workProvider) { } @Override public PowerReceiver getPowerReceiver(ForgeDirection side) { return powerHandler.getPowerReceiver(); } @Override public World getWorld() { return worldObj; } protected float getPowerUsePerTick() { return capacitorType.capacitor.getMaxEnergyExtracted(); } // RF Power @Override public int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) { return PowerHandlerUtil.recieveRedstoneFlux(from, powerHandler, maxReceive, simulate); } @Override public int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate) { return 0; } @Override public boolean canInterface(ForgeDirection from) { return true; } @Override public int getEnergyStored(ForgeDirection from) { return (int) (powerHandler.getEnergyStored() * 10); } @Override public int getMaxEnergyStored(ForgeDirection from) { return (int) (powerHandler.getMaxEnergyStored() * 10); } public int getMaxEnergyStoredMJ() { return (int)powerHandler.getMaxEnergyStored(); } @Override public void updateEntity() { if(worldObj == null) { // sanity check return; } if(worldObj.isRemote) { // check if the block on the client needs to update its texture if(isActive() != lastActive) { worldObj.markBlockForUpdate(xCoord, yCoord, zCoord); } lastActive = isActive(); return; } // else is server, do all logic only on the server updateStoredEnergyFromPowerHandler(); boolean requiresClientSync = forceClientUpdate; if(forceClientUpdate) { // First update, send state to client forceClientUpdate = false; } boolean prevRedCheck = redstoneCheckPassed; if(redstoneStateDirty) { redstoneCheckPassed = RedstoneControlMode.isConditionMet(redstoneControlMode, this); redstoneStateDirty = false; 
} if(worldObj.getTotalWorldTime() % 20 == 0) { requiresClientSync |= doSideIo(); } requiresClientSync |= prevRedCheck != redstoneCheckPassed; requiresClientSync |= processTasks(redstoneCheckPassed); requiresClientSync |= (lastSyncPowerStored != storedEnergy && worldObj.getTotalWorldTime() % 5 == 0); if(requiresClientSync) { lastSyncPowerStored = storedEnergy; // this will cause 'getPacketDescription()' to be called and its result // will be sent to the PacketHandler on the other end of // client/server connection worldObj.markBlockForUpdate(xCoord, yCoord, zCoord); // And this will make sure our current tile entity state is saved markDirty(); } } protected boolean doSideIo() { if(faceModes == null) { return false; } boolean res = false; Set<Entry<ForgeDirection, IoMode>> ents = faceModes.entrySet(); for(Entry<ForgeDirection, IoMode> ent : ents) { IoMode mode = ent.getValue(); if(mode.pulls()) { res = res | doPull(ent.getKey()); } if(mode.pushes()) { res = res | doPush(ent.getKey()); } } return res; } protected boolean doPush(ForgeDirection dir) { BlockCoord loc = getLocation().getLocation(dir); TileEntity te = worldObj.getTileEntity(loc.x, loc.y, loc.z); boolean res = false; if(slotDefinition.getNumOutputSlots() <= 0) { return false; } for(int i=slotDefinition.minOutputSlot; i<= slotDefinition.maxOutputSlot;i++) { ItemStack item = inventory[i]; if(item != null) { int num = ItemUtil.doInsertItem(te, item, dir.getOpposite()); if(num > 0) { item.stackSize -= num; if(item.stackSize <= 0) { item = null; } inventory[i] = item; markDirty(); res = true; } } } return res; } protected boolean doPull(ForgeDirection key) { return false; } protected void updateStoredEnergyFromPowerHandler() { storedEnergy = (float) powerHandler.getEnergyStored(); } protected abstract boolean processTasks(boolean redstoneCheckPassed); @Override public void readCustomNBT(NBTTagCompound nbtRoot) { facing = nbtRoot.getShort("facing"); setCapacitor(Capacitors.values()[nbtRoot.getShort("capacitorType")]); float storedEnergy = nbtRoot.getFloat("storedEnergy"); powerHandler.setEnergy(storedEnergy); // For the client as provider is not saved to NBT this.storedEnergy = storedEnergy; redstoneCheckPassed = nbtRoot.getBoolean("redstoneCheckPassed"); // read in the inventories contents inventory = new ItemStack[slotDefinition.getNumSlots()]; NBTTagList itemList = (NBTTagList) nbtRoot.getTag("Items"); for (int i = 0; i < itemList.tagCount(); i++) { NBTTagCompound itemStack = itemList.getCompoundTagAt(i); byte slot = itemStack.getByte("Slot"); if(slot >= 0 && slot < inventory.length) { inventory[slot] = ItemStack.loadItemStackFromNBT(itemStack); } } int rsContr = nbtRoot.getInteger("redstoneControlMode"); if(rsContr < 0 || rsContr >= RedstoneControlMode.values().length) { rsContr = 0; } redstoneControlMode = RedstoneControlMode.values()[rsContr]; if(nbtRoot.hasKey("hasFaces")) { for (ForgeDirection dir : ForgeDirection.VALID_DIRECTIONS) { if(nbtRoot.hasKey("face" + dir.ordinal())) { setIoMode(dir, IoMode.values()[nbtRoot.getShort("face" + dir.ordinal())]); } } } } @Override public void writeCustomNBT(NBTTagCompound nbtRoot) { nbtRoot.setShort("facing", facing); nbtRoot.setFloat("storedEnergy", storedEnergy); nbtRoot.setShort("capacitorType", (short) capacitorType.ordinal()); nbtRoot.setBoolean("redstoneCheckPassed", redstoneCheckPassed); // write inventory list NBTTagList itemList = new NBTTagList(); for (int i = 0; i < inventory.length; i++) { if(inventory[i] != null) { NBTTagCompound itemStackNBT = new NBTTagCompound(); 
itemStackNBT.setByte("Slot", (byte) i); inventory[i].writeToNBT(itemStackNBT); itemList.appendTag(itemStackNBT); } } nbtRoot.setTag("Items", itemList); nbtRoot.setInteger("redstoneControlMode", redstoneControlMode.ordinal()); //face modes if(faceModes != null) { nbtRoot.setByte("hasFaces", (byte) 1); for (Entry<ForgeDirection, IoMode> e : faceModes.entrySet()) { nbtRoot.setShort("face" + e.getKey().ordinal(), (short) e.getValue().ordinal()); } } } @Override public boolean isUseableByPlayer(EntityPlayer player) { if(worldObj == null) { return true; } if(worldObj.getTileEntity(xCoord, yCoord, zCoord) != this) { return false; } return player.getDistanceSq(xCoord + 0.5D, yCoord + 0.5D, zCoord + 0.5D) <= 64D; } @Override public int getSizeInventory() { return slotDefinition.getNumSlots(); } @Override public int getInventoryStackLimit() { return 64; } @Override public ItemStack getStackInSlot(int slot) { if(slot < 0 || slot >= inventory.length) { return null; } return inventory[slot]; } @Override public ItemStack decrStackSize(int fromSlot, int amount) { ItemStack fromStack = inventory[fromSlot]; if(fromStack == null) { return null; } if(fromStack.stackSize <= amount) { inventory[fromSlot] = null; updateCapacitorFromSlot(); return fromStack; } ItemStack result = new ItemStack(fromStack.getItem(), amount, fromStack.getItemDamage()); if(fromStack.stackTagCompound != null) { result.stackTagCompound = (NBTTagCompound) fromStack.stackTagCompound.copy(); } fromStack.stackSize -= amount; return result; } @Override public void setInventorySlotContents(int slot, ItemStack contents) { if(contents == null) { inventory[slot] = contents; } else { inventory[slot] = contents.copy(); } if(contents != null && contents.stackSize > getInventoryStackLimit()) { contents.stackSize = getInventoryStackLimit(); } if(slotDefinition.isUpgradeSlot(slot)) { updateCapacitorFromSlot(); } } private void updateCapacitorFromSlot() { if(slotDefinition.getNumUpgradeSlots() <= 0) { setCapacitor(Capacitors.BASIC_CAPACITOR); return; } ItemStack contents = inventory[slotDefinition.minUpgradeSlot]; if(contents == null || contents.getItem() != EnderIO.itemBasicCapacitor) { setCapacitor(Capacitors.BASIC_CAPACITOR); } else { setCapacitor(Capacitors.values()[contents.getItemDamage()]); } } @Override public ItemStack getStackInSlotOnClosing(int i) { return null; } @Override public void openInventory() { } @Override public void closeInventory() { } @Override public String getInventoryName() { return getMachineName(); } @Override public boolean hasCustomInventoryName() { return false; } @Override public int[] getAccessibleSlotsFromSide(int var1) { ForgeDirection dir = ForgeDirection.getOrientation(var1); IoMode mode = getIoMode(dir); if(mode == IoMode.DISABLED) { return new int[0]; } return allSlots; } @Override public boolean canInsertItem(int var1, ItemStack var2, int var3) { return true; } @Override public boolean canExtractItem(int var1, ItemStack var2, int var3) { return true; } public void onNeighborBlockChange(Block blockId) { redstoneStateDirty = true; } }
package edu.wustl.query.util.querysuite; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.http.HttpSession; import edu.common.dynamicextensions.domaininterface.AttributeInterface; import edu.common.dynamicextensions.domaininterface.EntityInterface; import edu.wustl.common.beans.SessionDataBean; import edu.wustl.common.query.queryobject.impl.OutputTreeDataNode; import edu.wustl.common.querysuite.queryobject.IOutputTerm; import edu.wustl.common.querysuite.queryobject.IQuery; import edu.wustl.common.tree.QueryTreeNodeData; import edu.wustl.query.util.global.Constants; /** * @author santhoshkumar_c * */ public class QueryDetails { //private QueryDetails(){} private List<OutputTreeDataNode> rootOutputTreeNodeList; private Map<String, OutputTreeDataNode> uniqueIdNodesMap; private Map<EntityInterface, List<EntityInterface>> mainEntityMap; private SessionDataBean sessionData; private String randomNumber; private Map<AttributeInterface, String> attributeColumnNameMap; private Map<String, IOutputTerm> outputTermsColumns; private IQuery query; private List<EntityInterface> mainEntityList; private List<QueryTreeNodeData> treeDataVector; private OutputTreeDataNode currentSelectedObject; private Map<Integer,String> parentNodesIdMap ; private Map <Integer,List<EntityInterface>> eachExpressionContainmentMap ; private Map <Integer,List<Integer>> mainExpEntityExpressionIdMap; //private HttpSession session; /** * @return the mainExpEntityExpressionIdMap */ public Map<Integer,List<Integer>> getMainExpEntityExpressionIdMap() { return mainExpEntityExpressionIdMap; } /** * @param mainExpEntityExpressionIdMap the mainExpEntityExpressionIdMap to set */ public void setMainExpEntityExpressionIdMap( Map<Integer,List<Integer>> mainExpEntityExpressionIdMap) { this.mainExpEntityExpressionIdMap = mainExpEntityExpressionIdMap; } /** * @return the eachExpressionContainmentMap */ public Map<Integer, List<EntityInterface>> getEachExpressionContainmentMap() { return eachExpressionContainmentMap; } /** * @param eachExpressionContainmentMap the eachExpressionContainmentMap to set */ public void setEachExpressionContainmentMap( Map<Integer, List<EntityInterface>> eachExpressionContainmentMap) { this.eachExpressionContainmentMap = eachExpressionContainmentMap; } /** * @return the parentNodesIdMap */ public Map<Integer, String> getParentNodesIdMap() { return parentNodesIdMap; } /** * @param parentNodesIdMap the parentNodesIdMap to set */ public void setParentNodesIdMap(Map<Integer, String> parentNodesIdMap) { this.parentNodesIdMap = parentNodesIdMap; } /** * @return the currentSelectedObject */ public OutputTreeDataNode getCurrentSelectedObject() { return currentSelectedObject; } /** * @param currentSelectedObject the currentSelectedObject to set */ public void setCurrentSelectedObject(OutputTreeDataNode currentSelectedObject) { this.currentSelectedObject = currentSelectedObject; } /** * @return the treeDataVector */ public List<QueryTreeNodeData> getTreeDataVector() { return treeDataVector; } /** * @param treeDataVector the treeDataVector to set */ public void setTreeDataVector(List<QueryTreeNodeData> treeDataVector) { this.treeDataVector = treeDataVector; } /** * @return the mainEntityList */ public List<EntityInterface> getMainEntityList() { return mainEntityList; } /** * @param mainEntityList the mainEntityList to set */ public void setMainEntityList(List<EntityInterface> mainEntityList) { this.mainEntityList = mainEntityList; } public QueryDetails(HttpSession session) { 
//this.session = session; rootOutputTreeNodeList = (List<OutputTreeDataNode>) session .getAttribute(Constants.SAVE_TREE_NODE_LIST); uniqueIdNodesMap = (Map<String, OutputTreeDataNode>) session .getAttribute(Constants.ID_NODES_MAP); mainEntityMap = (Map<EntityInterface, List<EntityInterface>>) session .getAttribute(Constants.MAIN_ENTITY_MAP); sessionData = (SessionDataBean) session.getAttribute(Constants.SESSION_DATA); this.randomNumber = QueryModuleUtil.generateRandomNumber(session); attributeColumnNameMap = (Map<AttributeInterface, String>) session .getAttribute(Constants.ATTRIBUTE_COLUMN_NAME_MAP); outputTermsColumns = (Map<String, IOutputTerm>) session .getAttribute(Constants.OUTPUT_TERMS_COLUMNS); query = (IQuery) session.getAttribute(Constants.QUERY_OBJECT); } /** * * @return */ public Map<AttributeInterface, String> getAttributeColumnNameMap() { return attributeColumnNameMap; } /** * * @param attributeColumnNameMap */ public void setAttributeColumnNameMap(Map<AttributeInterface, String> attributeColumnNameMap) { this.attributeColumnNameMap = attributeColumnNameMap; } /** * @return the rootOutputTreeNodeList */ public List<OutputTreeDataNode> getRootOutputTreeNodeList() { return rootOutputTreeNodeList; } /** * @param rootOutputTreeNodeList the rootOutputTreeNodeList to set */ public void setRootOutputTreeNodeList(List<OutputTreeDataNode> rootOutputTreeNodeList) { this.rootOutputTreeNodeList = rootOutputTreeNodeList; } /** * @return the mainEntityMap */ public Map<EntityInterface, List<EntityInterface>> getMainEntityMap() { return mainEntityMap; } /** * @param mainEntityMap the mainEntityMap to set */ public void setMainEntityMap(Map<EntityInterface, List<EntityInterface>> mainEntityMap) { this.mainEntityMap = mainEntityMap; } /** * @return the randomNumber */ public String getRandomNumber() { return randomNumber; } /** * @param randomNumber the randomNumber to set */ public void setRandomNumber(String randomNumber) { this.randomNumber = randomNumber; } /** * @return the sessionData */ public SessionDataBean getSessionData() { return sessionData; } /** * @param sessionData the sessionData to set */ public void setSessionData(SessionDataBean sessionData) { this.sessionData = sessionData; } /** * @return the uniqueIdNodesMap */ public Map<String, OutputTreeDataNode> getUniqueIdNodesMap() { return uniqueIdNodesMap; } /** * @param uniqueIdNodesMap the uniqueIdNodesMap to set */ public void setUniqueIdNodesMap(Map<String, OutputTreeDataNode> uniqueIdNodesMap) { this.uniqueIdNodesMap = uniqueIdNodesMap; } /** * @return the outputTermsColumns */ public Map<String, IOutputTerm> getOutputTermsColumns() { return outputTermsColumns; } /** * @param outputTermsColumns the outputTermsColumns to set */ public void setOutputTermsColumns(Map<String, IOutputTerm> outputTermsColumns) { this.outputTermsColumns = outputTermsColumns; } /** * @return the query */ public IQuery getQuery() { return query; } /** * @param query the query to set */ public void setQuery(IQuery query) { this.query = query; } }
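As an illustrative usage sketch (the class QueryDetailsUsageSketch and method currentQuery below are hypothetical and not part of the original sources), QueryDetails is essentially a session-backed holder: construct it from the current HttpSession and read whichever pieces of query state the earlier workflow steps have stored.

import javax.servlet.http.HttpServletRequest;

import edu.wustl.common.querysuite.queryobject.IQuery;
import edu.wustl.query.util.querysuite.QueryDetails;

public class QueryDetailsUsageSketch {

    // Wraps the request's session in a QueryDetails holder and returns the query
    // object stored earlier in the workflow (null if nothing has been stored yet).
    public static IQuery currentQuery(HttpServletRequest request) {
        QueryDetails details = new QueryDetails(request.getSession());
        return details.getQuery();
    }
}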
package com.haulmont.cuba.web.gui.components; import com.haulmont.cuba.gui.AppConfig; import com.haulmont.cuba.gui.ComponentVisitor; import com.haulmont.cuba.gui.ComponentsHelper; import com.haulmont.cuba.gui.components.Component; import com.haulmont.cuba.gui.components.IFrame; import com.haulmont.cuba.gui.components.TabSheet; import com.haulmont.cuba.gui.components.Window; import com.haulmont.cuba.gui.data.impl.DsContextImplementation; import com.haulmont.cuba.gui.settings.Settings; import com.haulmont.cuba.gui.xml.layout.ComponentLoader; import com.vaadin.ui.Layout; import org.dom4j.Element; import java.util.*; /** * @author abramov * @version $Id$ */ public class WebTabSheet extends WebAbstractComponent<com.vaadin.ui.TabSheet> implements TabSheet, Component.Wrapper, Component.Container { protected boolean postInitTaskAdded; protected boolean componentTabChangeListenerInitialized; protected ComponentLoader.Context context; public WebTabSheet() { component = new TabSheetEx(this); component.setCloseHandler(new DefaultCloseHandler()); } protected Map<String, Tab> tabs = new HashMap<>(); protected Map<com.vaadin.ui.Component, ComponentDescriptor> components = new HashMap<>(); protected Set<com.vaadin.ui.Component> lazyTabs = new HashSet<>(); protected Set<TabChangeListener> listeners = new HashSet<>(); @Override public void add(Component component) { throw new UnsupportedOperationException(); } @Override public void remove(Component component) { throw new UnsupportedOperationException(); } @Override public <T extends Component> T getOwnComponent(String id) { for (Tab tab : tabs.values()) { if (tab.getComponent() instanceof Container) { final Component component = WebComponentsHelper.getComponent((Container) tab.getComponent(), id); if (component != null) { return (T) component; } } } return null; } @Override public <T extends Component> T getComponent(String id) { return WebComponentsHelper.getComponent(this, id); } @Override public Collection<Component> getOwnComponents() { List<Component> componentList = new ArrayList<>(); for (ComponentDescriptor cd : components.values()) { componentList.add(cd.component); } return componentList; } @Override public Collection<Component> getComponents() { return ComponentsHelper.getComponents(this); } protected class Tab implements TabSheet.Tab { private String name; private Component component; private TabCloseHandler closeHandler; public Tab(String name, Component component) { this.name = name; this.component = component; } @Override public String getName() { return name; } @Override public void setName(String name) { this.name = name; } @Override public String getCaption() { return WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)).getCaption(); } @Override public void setCaption(String caption) { WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)).setCaption(caption); } @Override public boolean isEnabled() { return WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)).isEnabled(); } @Override public void setEnabled(boolean enabled) { WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)).setEnabled(enabled); } @Override public boolean isVisible() { return WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)).isVisible(); } @Override public void setVisible(boolean visible) { WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)).setVisible(visible); } @Override public boolean isClosable() { com.vaadin.ui.TabSheet.Tab tab = 
WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)); return tab.isClosable(); } @Override public void setClosable(boolean closable) { com.vaadin.ui.TabSheet.Tab tab = WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)); tab.setClosable(closable); } @Override public boolean isDetachable() { return false; } @Override public void setDetachable(boolean detachable) { } public TabCloseHandler getCloseHandler() { return closeHandler; } @Override public void setCloseHandler(TabCloseHandler tabCloseHandler) { this.closeHandler = tabCloseHandler; } public Component getComponent() { return component; } @Override public void setCaptionStyleName(String styleName) { // vaadin7 // com.vaadin.ui.TabSheet.Tab vaadinTab = WebTabSheet.this.component.getTab(WebComponentsHelper.unwrap(component)); // vaadinTab.setCaptionStyle(styleName); } } @Override public TabSheet.Tab addTab(String name, Component component) { final Tab tab = new Tab(name, component); this.tabs.put(name, tab); final com.vaadin.ui.Component tabComponent = WebComponentsHelper.unwrap(component); tabComponent.setSizeFull(); this.components.put(tabComponent, new ComponentDescriptor(name, component)); this.component.addTab(tabComponent); return tab; } @Override public TabSheet.Tab addLazyTab(String name, Element descriptor, ComponentLoader loader) { WebVBoxLayout tabContent = new WebVBoxLayout(); Layout layout = tabContent.getComponent(); layout.setSizeFull(); final Tab tab = new Tab(name, tabContent); tabs.put(name, tab); final com.vaadin.ui.Component tabComponent = WebComponentsHelper.unwrap(tabContent); tabComponent.setSizeFull(); this.components.put(tabComponent, new ComponentDescriptor(name, tabContent)); this.component.addTab(tabComponent); lazyTabs.add(tabComponent); this.component.addSelectedTabChangeListener(new LazyTabChangeListener(tabContent, descriptor, loader)); context = loader.getContext(); if (!postInitTaskAdded) { context.addPostInitTask(new ComponentLoader.PostInitTask() { @Override public void execute(ComponentLoader.Context context, IFrame window) { initComponentTabChangeListener(); } }); postInitTaskAdded = true; } return tab; } @Override public void removeTab(String name) { final Tab tab = tabs.get(name); if (tab == null) { throw new IllegalStateException(String.format("Can't find tab '%s'", name)); } tabs.remove(name); com.vaadin.ui.Component vComponent = WebComponentsHelper.unwrap(tab.getComponent()); this.components.remove(vComponent); this.component.removeComponent(vComponent); } @Override public Tab getTab() { final com.vaadin.ui.Component component = this.component.getSelectedTab(); if (component == null) { return null; } final String name = components.get(component).getName(); return tabs.get(name); } @Override public void setTab(TabSheet.Tab tab) { this.component.setSelectedTab(WebComponentsHelper.unwrap(((Tab) tab).getComponent())); } @Override public void setTab(String name) { Tab tab = tabs.get(name); if (tab == null) { throw new IllegalStateException(String.format("Can't find tab '%s'", name)); } this.component.setSelectedTab(WebComponentsHelper.unwrap(tab.getComponent())); } @Override public TabSheet.Tab getTab(String name) { return tabs.get(name); } @Override public Collection<TabSheet.Tab> getTabs() { return (Collection) tabs.values(); } @Override public void addListener(TabChangeListener listener) { initComponentTabChangeListener(); listeners.add(listener); } private void initComponentTabChangeListener() { // init component SelectedTabChangeListener only when needed, 
making sure it is // after all lazy tabs listeners if (!componentTabChangeListenerInitialized) { component.addSelectedTabChangeListener(new com.vaadin.ui.TabSheet.SelectedTabChangeListener() { @Override public void selectedTabChange(com.vaadin.ui.TabSheet.SelectedTabChangeEvent event) { // Fire GUI listener fireTabChanged(); // Execute outstanding post init tasks after GUI listener. // We suppose that context.executePostInitTasks() executes a task once and then remove it from task list. if (context != null) { context.executePostInitTasks(); } } }); componentTabChangeListenerInitialized = true; } } @Override public void removeListener(TabChangeListener listener) { listeners.remove(listener); } protected void fireTabChanged() { for (TabChangeListener listener : listeners) { listener.tabChanged(getTab()); } } protected static class TabSheetEx extends com.vaadin.ui.TabSheet implements WebComponentEx { private Component component; private TabSheetEx(Component component) { this.component = component; } @Override public Component asComponent() { return component; } } protected class LazyTabChangeListener implements com.vaadin.ui.TabSheet.SelectedTabChangeListener { private WebAbstractBox tabContent; private Element descriptor; private ComponentLoader loader; public LazyTabChangeListener(WebAbstractBox tabContent, Element descriptor, ComponentLoader loader) { this.tabContent = tabContent; this.descriptor = descriptor; this.loader = loader; } @Override public void selectedTabChange(com.vaadin.ui.TabSheet.SelectedTabChangeEvent event) { com.vaadin.ui.Component selectedTab = WebTabSheet.this.component.getSelectedTab(); com.vaadin.ui.Component tabComponent = tabContent.getComponent(); if (selectedTab == tabComponent && lazyTabs.remove(tabComponent)) { Component comp = loader.loadComponent(AppConfig.getFactory(), descriptor, null); tabContent.add(comp); com.vaadin.ui.Component impl = WebComponentsHelper.getComposition(comp); impl.setSizeFull(); final Window window = com.haulmont.cuba.gui.ComponentsHelper.getWindow(WebTabSheet.this); if (window != null) { com.haulmont.cuba.gui.ComponentsHelper.walkComponents( tabContent, new ComponentVisitor() { @Override public void visit(Component component, String name) { if (component instanceof HasSettings) { Settings settings = window.getSettings(); if (settings != null) { Element e = settings.get(name); ((HasSettings) component).applySettings(e); } } } } ); ((DsContextImplementation) window.getDsContext()).resumeSuspended(); } } } } protected static class ComponentDescriptor { private Component component; private String name; public ComponentDescriptor(String name, Component component) { this.name = name; this.component = component; } public Component getComponent() { return component; } public String getName() { return name; } } protected class DefaultCloseHandler implements com.vaadin.ui.TabSheet.CloseHandler { private static final long serialVersionUID = -6766617382191585632L; @Override public void onTabClose(com.vaadin.ui.TabSheet tabsheet, com.vaadin.ui.Component tabContent) { // have no other way to get tab from tab content for (Tab tab : tabs.values()) { com.vaadin.ui.Component tabComponent = WebComponentsHelper.unwrap(tab.getComponent()); if (tabComponent == tabContent) { if (tab.isClosable()) { doHandleCloseTab(tab); return; } } } } private void doHandleCloseTab(Tab tab) { if (tab.getCloseHandler() != null) { tab.getCloseHandler().onTabClose(tab); } else { removeTab(tab.getName()); } } } }
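A hypothetical construction sketch, not part of the CUBA sources: the class and method names below are made up, and the snippet is assumed to live in the same package as WebTabSheet so the Web* implementation classes are visible. It shows the basic contract: tabs are registered by name through addTab(), and the selection is switched by that same name.

import com.haulmont.cuba.gui.components.TabSheet;

public class WebTabSheetUsageSketch {

    // Builds a two-tab sheet using WebVBoxLayout as tab content (the same component
    // addLazyTab() uses above) and selects the first tab by name.
    public static TabSheet buildSettingsTabs() {
        WebTabSheet tabSheet = new WebTabSheet();
        tabSheet.addTab("general", new WebVBoxLayout());
        tabSheet.addTab("advanced", new WebVBoxLayout());
        tabSheet.setTab("general");
        return tabSheet;
    }
}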
package com.haulmont.cuba.web.sys.auth; import com.haulmont.cuba.core.global.ConfigProvider; import com.haulmont.cuba.core.global.GlobalConfig; import com.haulmont.cuba.core.global.MessageProvider; import com.haulmont.cuba.core.sys.AppContext; import com.haulmont.cuba.security.global.LoginException; import com.haulmont.cuba.web.WebConfig; import com.haulmont.cuba.web.sys.ActiveDirectoryHelper; import jespa.http.HttpSecurityService; import jespa.ntlm.NtlmSecurityProvider; import jespa.security.PasswordCredential; import jespa.security.SecurityProviderException; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** * @author artamonov * @version $Id$ */ @SuppressWarnings("unused") public class JespaAuthProvider extends HttpSecurityService implements CubaAuthProvider { private static class DomainInfo { private String bindStr; private String acctName; private String acctPassword; private DomainInfo(String bindStr, String acctName, String acctPassword) { this.acctName = acctName; this.acctPassword = acctPassword; this.bindStr = bindStr; } } private static Map<String, DomainInfo> domains = new HashMap<String, DomainInfo>(); private static String defaultDomain; private Log log = LogFactory.getLog(getClass()); @Override public void init(FilterConfig filterConfig) throws ServletException { initDomains(); Map<String, String> properties = new HashMap<String, String>(); properties.put("jespa.bindstr", getBindStr()); properties.put("jespa.service.acctname", getAcctName()); properties.put("jespa.service.password", getAcctPassword()); properties.put("jespa.account.canonicalForm", "3"); properties.put("jespa.log.path", ConfigProvider.getConfig(GlobalConfig.class).getLogDir() + "/jespa.log"); fillFromSystemProperties(properties); try { super.init(properties); } catch (SecurityProviderException e) { throw new ServletException(e); } } @Override public void destroy() { } @Override public void authenticate(String login, String password, Locale loc) throws LoginException { int p = login.indexOf('\\'); if (p <= 0) throw new LoginException(MessageProvider.getMessage(ActiveDirectoryHelper.class, "activeDirectory.invalidName", loc), login); String domain = login.substring(0, p); String user = login.substring(p + 1); DomainInfo domainInfo = domains.get(domain); if (domainInfo == null) { throw new LoginException( MessageProvider.getMessage(ActiveDirectoryHelper.class, "activeDirectory.unknownDomain", loc), domain ); } Map<String, String> params = new HashMap<String, String>(); params.put("bindstr", domainInfo.bindStr); params.put("service.acctname", domainInfo.acctName); params.put("service.password", domainInfo.acctPassword); params.put("account.canonicalForm", "3"); fillFromSystemProperties(params); NtlmSecurityProvider provider = new NtlmSecurityProvider(params); try { PasswordCredential credential = new PasswordCredential(user, password.toCharArray()); provider.authenticate(credential); } catch (SecurityProviderException e) { throw new LoginException( MessageProvider.getMessage(ActiveDirectoryHelper.class, "activeDirectory.authenticationError", loc), e.getMessage() ); } } private void initDomains() { WebConfig webConfig = ConfigProvider.getConfig(WebConfig.class); String domainsStr = webConfig.getActiveDirectoryDomains(); if (!StringUtils.isBlank(domainsStr)) { String[] strings = 
domainsStr.split(";"); for (int i = 0; i < strings.length; i++) { String domain = strings[i]; domain = domain.trim(); if (!StringUtils.isBlank(domain)) { String[] parts = domain.split("\\|"); if (parts.length != 4) { log.error("Invalid ActiveDirectory domain definition: " + domain); break; } else { domains.put(parts[0], new DomainInfo(parts[1], parts[2], parts[3])); if (i == 0) defaultDomain = parts[0]; } } } } } public String getDefaultDomain() { return defaultDomain != null ? defaultDomain : ""; } public String getBindStr() { return getBindStr(getDefaultDomain()); } public String getBindStr(String domain) { initDomains(); DomainInfo domainInfo = domains.get(domain); return domainInfo != null ? domainInfo.bindStr : ""; } public String getAcctName() { return getAcctName(getDefaultDomain()); } public String getAcctName(String domain) { initDomains(); DomainInfo domainInfo = domains.get(domain); return domainInfo != null ? domainInfo.acctName : ""; } public String getAcctPassword() { return getAcctPassword(getDefaultDomain()); } public String getAcctPassword(String domain) { initDomains(); DomainInfo domainInfo = domains.get(domain); return domainInfo != null ? domainInfo.acctPassword : ""; } public void fillFromSystemProperties(Map<String, String> params) { for (String name : AppContext.getPropertyNames()) { if (name.startsWith("jespa.")) { params.put(name, AppContext.getProperty(name)); } } } }
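For reference, initDomains() above parses the value returned by WebConfig.getActiveDirectoryDomains() as a ';'-separated list of entries, each with exactly four '|'-separated parts: domain name, bind string, service account name and password; the first entry becomes the default domain. The snippet below is a purely hypothetical illustration of that format (the class name and all host names and credentials are placeholders).

public class ActiveDirectoryDomainsExample {

    // Illustrative value only: two domains, the first ("MAIN") becomes the default.
    public static final String ACTIVE_DIRECTORY_DOMAINS =
            "MAIN|dc1.main.example.com|jespa-service|secret1;" +
            "BRANCH|dc1.branch.example.com|jespa-service|secret2";
}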
package edu.harvard.iq.dataverse; import java.util.List; import javax.ejb.Stateless; import javax.inject.Named; import javax.persistence.EntityManager; import javax.persistence.NoResultException; import javax.persistence.NonUniqueResultException; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; /** * * @author xyang */ @Stateless @Named public class DatasetFieldServiceBean implements java.io.Serializable { @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; private static final String NAME_QUERY = "SELECT dsfType from DatasetFieldType dsfType where dsfType.name= :fieldName"; public List<DatasetFieldType> findAllAdvancedSearchFieldTypes() { return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.id").getResultList(); } public List<DatasetFieldType> findAllFacetableFieldTypes() { return em.createNamedQuery("DatasetFieldType.findAllFacetable", DatasetFieldType.class) .getResultList(); } public List<DatasetFieldType> findFacetableFieldTypesByMetadataBlock(Long metadataBlockId) { return em.createNamedQuery("DatasetFieldType.findFacetableByMetadaBlock", DatasetFieldType.class) .setParameter("metadataBlockId", metadataBlockId) .getResultList(); } public List<DatasetFieldType> findAllRequiredFields() { return em.createQuery("select object(o) from DatasetFieldType as o where o.required = true order by o.id").getResultList(); } public List<DatasetFieldType> findAllOrderedById() { return em.createQuery("select object(o) from DatasetFieldType as o order by o.id").getResultList(); } public List<DatasetFieldType> findAllOrderedByName() { return em.createQuery("select object(o) from DatasetFieldType as o order by o.name").getResultList(); } public DatasetFieldType find(Object pk) { return (DatasetFieldType) em.find(DatasetFieldType.class, pk); } public DatasetFieldType findByName(String name) { DatasetFieldType dsfType = (DatasetFieldType) em.createQuery(NAME_QUERY).setParameter("fieldName", name).getSingleResult(); return dsfType; } /** * Gets the dataset field type, or returns {@code null}. Does not throw * exceptions. * * @param name the name do the field type * @return the field type, or {@code null} * @see #findByName(java.lang.String) */ public DatasetFieldType findByNameOpt(String name) { try { return em.createNamedQuery("DatasetFieldType.findByName", DatasetFieldType.class) .setParameter("name", name) .getSingleResult(); } catch (NoResultException nre) { return null; } } /* * Similar method for looking up foreign metadata field mappings, for metadata * imports. for these the uniquness of names isn't guaranteed (i.e., there * can be a field "author" in many different formats that we want to support), * so these have to be looked up by both the field name and the name of the * foreign format. */ public ForeignMetadataFieldMapping findFieldMapping(String formatName, String pathName) { try { return em.createNamedQuery("ForeignMetadataFieldMapping.findByPath", ForeignMetadataFieldMapping.class) .setParameter("formatName", formatName) .setParameter("xPath", pathName) .getSingleResult(); } catch (NoResultException nre) { return null; } // TODO: cache looked up results. } public ControlledVocabularyValue findControlledVocabularyValue(Object pk) { return (ControlledVocabularyValue) em.find(ControlledVocabularyValue.class, pk); } /** * @param dsft The DatasetFieldType in which to look up a * ControlledVocabularyValue. 
 * @param strValue String value that may exist in a controlled vocabulary of * the provided DatasetFieldType. * * @return The ControlledVocabularyValue found or null. */ public ControlledVocabularyValue findControlledVocabularyValueByDatasetFieldTypeAndStrValue(DatasetFieldType dsft, String strValue) { TypedQuery<ControlledVocabularyValue> typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.strValue = :strvalue AND o.datasetFieldType = :dsft", ControlledVocabularyValue.class); typedQuery.setParameter("strvalue", strValue); typedQuery.setParameter("dsft", dsft); try { ControlledVocabularyValue cvv = typedQuery.getSingleResult(); return cvv; } catch (NoResultException | NonUniqueResultException ex) { return null; } } // return the singleton NA ControlledVocabularyValue public ControlledVocabularyValue findNAControlledVocabularyValue() { TypedQuery<ControlledVocabularyValue> typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.datasetFieldType is null AND o.strValue = :strvalue", ControlledVocabularyValue.class); typedQuery.setParameter("strvalue", DatasetField.NA_VALUE); return typedQuery.getSingleResult(); } public DatasetFieldType save(DatasetFieldType dsfType) { return em.merge(dsfType); } public MetadataBlock save(MetadataBlock mdb) { return em.merge(mdb); } public ControlledVocabularyValue save(ControlledVocabularyValue cvv) { return em.merge(cvv); } }
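A hypothetical caller sketch (DatasetFieldLookupSketch and findValue are not part of Dataverse; the bean is assumed to sit in the same edu.harvard.iq.dataverse package so the entity types resolve without extra imports): findByNameOpt() is the lookup variant that returns null instead of throwing, so it composes cleanly with the controlled-vocabulary lookup above.

import javax.ejb.EJB;
import javax.ejb.Stateless;

@Stateless
public class DatasetFieldLookupSketch {

    @EJB
    DatasetFieldServiceBean datasetFieldService;

    // Resolves a field type by name without risking NoResultException, then looks up
    // one of its controlled vocabulary values; returns null if either is missing.
    public ControlledVocabularyValue findValue(String fieldTypeName, String strValue) {
        DatasetFieldType fieldType = datasetFieldService.findByNameOpt(fieldTypeName);
        if (fieldType == null) {
            return null;
        }
        return datasetFieldService.findControlledVocabularyValueByDatasetFieldTypeAndStrValue(fieldType, strValue);
    }
}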
package edu.rice.cs.caper.bayou.core.synthesizer; import edu.rice.cs.caper.bayou.core.dsl.DSubTree; import org.eclipse.jdt.core.dom.*; import org.eclipse.jdt.core.dom.rewrite.ASTRewrite; import org.eclipse.jdt.core.dom.rewrite.ListRewrite; import org.eclipse.jface.text.BadLocationException; import org.eclipse.jface.text.Document; import java.util.*; public class Visitor extends ASTVisitor { final DSubTree dAST; final Document document; final CompilationUnit cu; String synthesizedProgram; protected ASTRewrite rewriter; Block evidenceBlock; List<Variable> currentScope; private static final Map<String,Class> primitiveToClass; static { Map<String,Class> map = new HashMap<>(); map.put("int", int.class); map.put("long", long.class); map.put("double", double.class); map.put("float", float.class); map.put("boolean", boolean.class); map.put("char", char.class); map.put("byte", byte.class); map.put("void", void.class); map.put("short", short.class); primitiveToClass = Collections.unmodifiableMap(map); } public Visitor(DSubTree dAST, Document document, CompilationUnit cu) { this.dAST = dAST; this.document = document; this.cu = cu; this.rewriter = ASTRewrite.create(this.cu.getAST()); this.currentScope = new ArrayList<>(); } @Override public boolean visit(MethodInvocation invocation) { /* TODO: these checks are the same as in EvidenceExtractor. Make this process better. */ IMethodBinding binding = invocation.resolveMethodBinding(); if (binding == null) throw new RuntimeException("Could not resolve binding. " + "Either CLASSPATH is not set correctly, or there is an invalid evidence type."); ITypeBinding cls = binding.getDeclaringClass(); if (cls == null || !cls.getQualifiedName().equals("edu.rice.cs.caper.bayou.annotations.Evidence")) return false; if (! (invocation.getParent().getParent() instanceof Block)) throw new RuntimeException("Evidence has to be given in a (empty) block."); Block evidenceBlock = (Block) invocation.getParent().getParent(); if (this.evidenceBlock != null) if (this.evidenceBlock != evidenceBlock) throw new RuntimeException("Only one synthesis query at a time is supported."); else return false; /* synthesis is already done */ this.evidenceBlock = evidenceBlock; String name = binding.getName(); if (! (name.equals("apicalls") || name.equals("types") || name.equals("context"))) throw new RuntimeException("Invalid evidence type: " + binding.getName()); Environment env = new Environment(invocation.getAST(), currentScope); Block body; try { body = dAST.synthesize(env); } catch (SynthesisException e) { synthesizedProgram = null; return false; } // Apply dead code elimination here DCEOptimizor dce = new DCEOptimizor(); //body = dce.apply(body); /* make rewrites to the local method body */ body = postprocessLocal(invocation.getAST(), env, body, dce.getEliminatedVars()); rewriter.replace(evidenceBlock, body, null); try { rewriter.rewriteAST(document, null).apply(document); /* make rewrites to the document */ postprocessGlobal(cu.getAST(), env, document); } catch (BadLocationException e) { System.err.println("Could not edit document for some reason.\n" + e.getMessage()); synthesizedProgram = null; return false; } synthesizedProgram = document.get(); return false; } private Block postprocessLocal(AST ast, Environment env, Block body, Set<String> eliminatedVars) { /* add uncaught exeptions */ Set<Class> exceptions = dAST.exceptionsThrown(eliminatedVars); env.imports.addAll(exceptions); if (! 
exceptions.isEmpty()) { TryStatement statement = ast.newTryStatement(); statement.setBody(body); List<Class> exceptions_ = new ArrayList<>(exceptions); exceptions_.sort((Class e1, Class e2) -> e1.isAssignableFrom(e2)? 1: -1); for (Class except : exceptions_) { CatchClause catchClause = ast.newCatchClause(); SingleVariableDeclaration ex = ast.newSingleVariableDeclaration(); ex.setType(ast.newSimpleType(ast.newName(except.getSimpleName()))); ex.setName(ast.newSimpleName("_e")); catchClause.setException(ex); catchClause.setBody(ast.newBlock()); statement.catchClauses().add(catchClause); } body = ast.newBlock(); body.statements().add(statement); } /* add variable declarations */ for (Variable var : env.mu_scope) { if (!eliminatedVars.contains(var.name)) { VariableDeclarationFragment varDeclFrag = ast.newVariableDeclarationFragment(); varDeclFrag.setName(ast.newSimpleName(var.name)); VariableDeclarationStatement varDeclStmt = ast.newVariableDeclarationStatement(varDeclFrag); if (var.type.isPrimitive()) varDeclStmt.setType(ast.newPrimitiveType(PrimitiveType.toCode(var.type.getSimpleName()))); else varDeclStmt.setType(ast.newSimpleType(ast.newSimpleName(var.type.getSimpleName()))); body.statements().add(0, varDeclStmt); } } return body; } private void postprocessGlobal(AST ast, Environment env, Document document) throws BadLocationException { /* add imports */ ASTRewrite rewriter = ASTRewrite.create(ast); ListRewrite lrw = rewriter.getListRewrite(cu, CompilationUnit.IMPORTS_PROPERTY); Set<Class> toImport = new HashSet<>(env.imports); toImport.addAll(dAST.exceptionsThrown()); // add all catch(...) types to imports for (Class cls : toImport) { if (cls.isPrimitive() || cls.getPackage().getName().equals("java.lang")) continue; ImportDeclaration impDecl = cu.getAST().newImportDeclaration(); String className = cls.getName().replaceAll("\\$", "\\."); impDecl.setName(cu.getAST().newName(className.split("\\."))); lrw.insertLast(impDecl, null); } rewriter.rewriteAST(document, null).apply(document); } /* setup the scope of variables for synthesis */ @Override public boolean visit(MethodDeclaration method) { currentScope.clear(); /* add variables in the formal parameters */ for (Object o : method.parameters()) { SingleVariableDeclaration param = (SingleVariableDeclaration) o; Type t = param.getType(); Class type; if (t.isSimpleType()) { ITypeBinding binding = t.resolveBinding(); if (binding == null) continue; try { type = Environment.getClass(binding.getQualifiedName()); } catch (ClassNotFoundException e) { synthesizedProgram = null; return false; } } else if (t.isPrimitiveType()) type = primitiveToClass.get(((PrimitiveType) t).getPrimitiveTypeCode().toString()); else continue; Variable v = new Variable(param.getName().getIdentifier(), type); currentScope.add(v); } /* add local variables declared in the (beginning of) method body */ Block body = method.getBody(); for (Object o : body.statements()) { Statement stmt = (Statement) o; if (! 
(stmt instanceof VariableDeclarationStatement)) break; // stop at the first non-variable declaration VariableDeclarationStatement varDecl = (VariableDeclarationStatement) stmt; Type t = varDecl.getType(); Class type; if (t.isSimpleType()) { ITypeBinding binding = t.resolveBinding(); if (binding == null) continue; try { type = Environment.getClass(binding.getQualifiedName()); } catch (ClassNotFoundException e) { synthesizedProgram = null; return false; } } else if (t.isPrimitiveType()) type = primitiveToClass.get(((PrimitiveType) t).getPrimitiveTypeCode().toString()); else continue; for (Object f : varDecl.fragments()) { VariableDeclarationFragment frag = (VariableDeclarationFragment) f; Variable v = new Variable(frag.getName().getIdentifier(), type); currentScope.add(v); } } return true; } }
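A hypothetical driver sketch (VisitorUsageSketch and synthesize are not part of the Bayou sources; it is assumed to sit in the same package as Visitor so the package-private synthesizedProgram field is readable). It shows the JDT setup the visitor depends on: bindings must be resolved, which requires a unit name and an environment/classpath, because visit(MethodInvocation) calls resolveMethodBinding().

import org.eclipse.jdt.core.dom.AST;
import org.eclipse.jdt.core.dom.ASTParser;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jface.text.Document;

import edu.rice.cs.caper.bayou.core.dsl.DSubTree;

public class VisitorUsageSketch {

    // Parses the source with bindings resolved, runs the synthesis visitor over it
    // and returns the rewritten program, or null if synthesis failed.
    public static String synthesize(String source, String[] classpath, DSubTree dAST) {
        ASTParser parser = ASTParser.newParser(AST.JLS8);
        parser.setSource(source.toCharArray());
        parser.setKind(ASTParser.K_COMPILATION_UNIT);
        parser.setUnitName("Program.java");
        parser.setEnvironment(classpath, null, null, true);
        parser.setResolveBindings(true);
        CompilationUnit cu = (CompilationUnit) parser.createAST(null);

        Document document = new Document(source);
        Visitor visitor = new Visitor(dAST, document, cu);
        cu.accept(visitor);
        return visitor.synthesizedProgram;
    }
}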
package com.jetbrains.python.console; import com.intellij.codeInsight.lookup.Lookup; import com.intellij.codeInsight.lookup.LookupManager; import com.intellij.execution.*; import com.intellij.execution.console.LanguageConsoleImpl; import com.intellij.execution.console.LanguageConsoleViewImpl; import com.intellij.execution.executors.DefaultRunExecutor; import com.intellij.execution.process.*; import com.intellij.execution.ui.RunContentDescriptor; import com.intellij.execution.ui.actions.CloseAction; import com.intellij.ide.CommonActionsManager; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.Result; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.module.ModuleUtil; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.encoding.EncodingManager; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.ToolWindow; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.psi.PsiFile; import com.intellij.util.PairProcessor; import com.jetbrains.django.run.ExecutionHelper; import com.jetbrains.django.run.Runner; import com.jetbrains.python.psi.LanguageLevel; import com.jetbrains.python.sdk.PythonSdkType; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.Charset; import java.util.Arrays; /** * @author oleg */ public class PyConsoleRunner { protected final Project myProject; protected final String myConsoleTitle; private OSProcessHandler myProcessHandler; protected final CommandLineArgumentsProvider myProvider; protected final String myWorkingDir; protected LanguageConsoleViewImpl myConsoleView; private final ConsoleHistoryModel myHistory = new ConsoleHistoryModel(); private AnAction myRunAction; public PyConsoleRunner(@NotNull final Project project, @NotNull final String consoleTitle, @NotNull final CommandLineArgumentsProvider provider, @Nullable final String workingDir) { myProject = project; myConsoleTitle = consoleTitle; myProvider = provider; myWorkingDir = workingDir; } public static void run(@NotNull final Project project, @NotNull final String consoleTitle, @NotNull final CommandLineArgumentsProvider provider, @Nullable final String workingDir) { final PyConsoleRunner consoleRunner = new PyConsoleRunner(project, consoleTitle, provider, workingDir); try { consoleRunner.initAndRun(); } catch (ExecutionException e) { ExecutionHelper.showErrors(project, Arrays.<Exception>asList(e), consoleTitle, null); } } public void initAndRun() throws ExecutionException { // Create Server process final Process process = createProcess(); // Init console view myConsoleView = createConsoleView(); myProcessHandler = createProcessHandler(process); ProcessTerminatedListener.attach(myProcessHandler); // Set language level for (Module module : ModuleManager.getInstance(myProject).getModules()) { final Sdk pythonSdk 
= PythonSdkType.findPythonSdk(module); if (pythonSdk != null){ final LanguageLevel languageLevel = PythonSdkType.getLanguageLevelForSdk(pythonSdk); final PsiFile psiFile = getLanguageConsole().getFile(); // Set module explicitly psiFile.putUserData(ModuleUtil.KEY_MODULE, module); final VirtualFile vFile = psiFile.getVirtualFile(); if (vFile != null) { // Set language level vFile.putUserData(LanguageLevel.KEY, languageLevel); } break; } } myProcessHandler.addProcessListener(new ProcessAdapter() { @Override public void processTerminated(ProcessEvent event) { myRunAction.getTemplatePresentation().setEnabled(false); myConsoleView.getConsole().setPrompt(""); myConsoleView.getConsole().getConsoleEditor().setRendererMode(true); ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { myConsoleView.getConsole().getConsoleEditor().getComponent().updateUI(); } }); } }); // Setup default prompt myConsoleView.getConsole().setPrompt(PyConsoleHighlightingUtil.ORDINARY_PROMPT.trim()); // Attach to process myConsoleView.attachToProcess(myProcessHandler); // Add filter TODO[oleg]: Add stacktrace filters // myConsoleView.addMessageFilter(new OutputConsoleFilter()); // Runner creating final Executor defaultExecutor = ExecutorRegistry.getInstance().getExecutorById(DefaultRunExecutor.EXECUTOR_ID); final DefaultActionGroup toolbarActions = new DefaultActionGroup(); final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, toolbarActions, false); // Runner creating final JPanel panel = new JPanel(new BorderLayout()); panel.add(actionToolbar.getComponent(), BorderLayout.WEST); panel.add(myConsoleView.getComponent(), BorderLayout.CENTER); final RunContentDescriptor myDescriptor = new RunContentDescriptor(myConsoleView, myProcessHandler, panel, myConsoleTitle); // tool bar actions final AnAction[] actions = fillToolBarActions(toolbarActions, defaultExecutor, myDescriptor); registerActionShortcuts(actions, getLanguageConsole().getConsoleEditor().getComponent()); registerActionShortcuts(actions, panel); panel.updateUI(); // Show in run toolwindow ExecutionManager.getInstance(myProject).getContentManager().showRunContent(defaultExecutor, myDescriptor); // Request focus final ToolWindow window = ToolWindowManager.getInstance(myProject).getToolWindow(defaultExecutor.getId()); window.activate(new Runnable() { public void run() { IdeFocusManager.getInstance(myProject).requestFocus(getLanguageConsole().getCurrentEditor().getContentComponent(), true); } }); // Run myProcessHandler.startNotify(); } protected LanguageConsoleViewImpl createConsoleView() { return new PyLanguageConsoleView(myProject, myConsoleTitle); } @Nullable protected Process createProcess() throws ExecutionException { return Runner.createProcess(myWorkingDir, true, myProvider.getAdditionalEnvs(), myProvider.getArguments()); } private PyConsoleProcessHandler createProcessHandler(final Process process) { final Charset outputEncoding = EncodingManager.getInstance().getDefaultCharset(); return new PyConsoleProcessHandler(process, myConsoleView.getConsole(), getProviderCommandLine(myProvider), outputEncoding); } private void registerActionShortcuts(final AnAction[] actions, final JComponent component) { for (AnAction action : actions) { if (action.getShortcutSet() != null) { action.registerCustomShortcutSet(action.getShortcutSet(), component); } } } private AnAction[] fillToolBarActions(final DefaultActionGroup toolbarActions, final Executor defaultExecutor, final RunContentDescriptor 
myDescriptor) { //stop final AnAction stopAction = createStopAction(); toolbarActions.add(stopAction); //close final AnAction closeAction = createCloseAction(defaultExecutor, myDescriptor); toolbarActions.add(closeAction); // run action myRunAction = new DumbAwareAction(null, null, IconLoader.getIcon("/actions/execute.png")) { public void actionPerformed(final AnActionEvent e) { runExecuteActionInner(true); } public void update(final AnActionEvent e) { final EditorEx editor = getLanguageConsole().getConsoleEditor(); final Lookup lookup = LookupManager.getActiveLookup(editor); e.getPresentation().setEnabled(!myProcessHandler.isProcessTerminated() && (lookup == null || !lookup.isCompletion())); } }; try { // TODO[oleg] fix when Maia compatibility doesn't care EmptyAction.setupAction(myRunAction, "Console.Execute", null); } catch (NullPointerException e) { EmptyAction.setupAction(myRunAction, "Python.Console.Execute", null); } toolbarActions.add(myRunAction); // Help toolbarActions.add(CommonActionsManager.getInstance().createHelpAction("interactive_console")); // history actions final PairProcessor<AnActionEvent, String> historyProcessor = new PairProcessor<AnActionEvent, String>() { public boolean process(final AnActionEvent e, final String s) { new WriteCommandAction(myProject, getLanguageConsole().getFile()) { protected void run(final Result result) throws Throwable { getLanguageConsole().getEditorDocument().setText(s == null? "" : s); } }.execute(); return true; } }; final AnAction historyNextAction = ConsoleHistoryModel.createHistoryAction(myHistory, true, historyProcessor); final AnAction historyPrevAction = ConsoleHistoryModel.createHistoryAction(myHistory, false, historyProcessor); historyNextAction.getTemplatePresentation().setVisible(false); historyPrevAction.getTemplatePresentation().setVisible(false); toolbarActions.add(historyNextAction); toolbarActions.add(historyPrevAction); return new AnAction[]{stopAction, closeAction, myRunAction, historyNextAction, historyPrevAction}; } protected AnAction createCloseAction(final Executor defaultExecutor, final RunContentDescriptor myDescriptor) { return new CloseAction(defaultExecutor, myDescriptor, myProject); } protected AnAction createStopAction() { return ActionManager.getInstance().getAction(IdeActions.ACTION_STOP_PROGRAM); } protected void sendInput(final String input) { final Charset charset = myProcessHandler.getCharset(); final OutputStream outputStream = myProcessHandler.getProcessInput(); try { byte[] bytes = input.getBytes(charset.name()); outputStream.write(bytes); outputStream.flush(); } catch (IOException e) { // ignore } } public LanguageConsoleImpl getLanguageConsole() { return myConsoleView.getConsole(); } private void runExecuteActionInner(final boolean erase) { // Process input and add to history final Document document = getLanguageConsole().getCurrentEditor().getDocument(); final String documentText = document.getText(); final TextRange range = new TextRange(0, document.getTextLength()); getLanguageConsole().getCurrentEditor().getSelectionModel().setSelection(range.getStartOffset(), range.getEndOffset()); getLanguageConsole().addCurrentToHistory(range, false); if (erase) { getLanguageConsole().setInputText(""); } final String line = documentText; if (!StringUtil.isEmptyOrSpaces(line)){ myHistory.addToHistory(line); } // Send to interpreter / server final String text2send = line.length() == 0 ? 
"\n\n" : line + "\n"; sendInput(text2send); if (myConsoleView instanceof ConsoleNotification){ ((ConsoleNotification)myConsoleView).inputSent(text2send); } } private static String getProviderCommandLine(final CommandLineArgumentsProvider provider) { final StringBuilder builder = new StringBuilder(); for (String s : provider.getArguments()) { if (builder.length() > 0){ builder.append(' '); } builder.append(s); } return builder.toString(); } public Project getProject() { return myProject; } }
package harmony.mastermind.logic.parser; import java.util.Calendar; public class ParserMemoryMain { private static final String STRING_AND = "and"; private static final String STRING_ON = "on"; private static final String STRING_FROM = "from"; private static final String STRING_TO = "to"; private static final String STRING_BETWEEN = "between"; private static final String STRING_BEFORE = "before"; private static final String STRING_TILL = "till"; private static final String STRING_UNTIL = "until"; private static final String STRING_BY = "by"; private static final String STRING_DEADLINE = "deadline"; //Commands for start date protected static final int BETWEEN = 13; protected static final int ON = 12; protected static final int FROM = 11; //Commands for end date protected static final int AND = 7; protected static final int TILL = 6; protected static final int TO = 5; protected static final int BEFORE = 4; protected static final int UNTIL = 3; protected static final int BY = 2; protected static final int DEADLINE = 1; protected static String command; protected static String taskName; protected static String description; protected static final int INVALID_INT = -1; protected static int type; protected static int length; protected static boolean containsDescription; protected static boolean setProper; private static int day; private static int month; private static int year; private static final int INT_INVALID = -1; private static final int INVALID_STRING = 0; private static final String INVALID_DATE_TIME = "Invalid date/time: "; private static final String INVALID_COMMAND = "Invalid command, please try again"; //@@author A0143378Y //General getters and setters public static String getCommand() { return command; } //@@author A0143378Y public static void setCommand(String newCommand) { command = newCommand; } //@@author A0143378Y public static void setTaskName(String newName) { taskName = newName; } //@@author A0143378Y protected static void setDescription(String newDescription) { description = newDescription; } //@@author A0143378Y protected static void setLength(int newLength) { length = newLength; } //@@author A0143378Y protected static void setType(int newType) { type = newType; } //@@author A0143378Y protected static void setContainsDescription(boolean cd) { containsDescription = cd; } //@@author A0143378Y protected static void setProper(boolean sp) { setProper = sp; } //@@author A014338Y /* * Set date to a calendar object setEvent * If set, return true. */ protected static boolean setDate(String date, Calendar setEvent) { boolean isValid = false; initialiseDate(); getDate(date); isValid = setDateIfContainDDMMYY(day, month, year, setEvent); return isValid; } //@@author A0143378Y protected static boolean setTime(String time, Calendar setEvent) { int newTime = INT_INVALID; int hour = 23, minute = 59; boolean isValid = false; String timeReduced = reduceToInt(time); newTime = convertToInt(timeReduced); //Checks that time string has exactly 4 digit. 
if(timeReduced.length() == 4 && newTime != INVALID_INT){ minute = newTime%100; hour = newTime/100; } //Checks that the time set is valid if(!(invalidMinute(minute)||invalidHour(hour)||timeReduced.length()!=4)){ isValid = setTimeIfHHMMSS(hour, minute, setEvent); } return isValid; } //@@author A0143378Y /* * Check the format the date is in * dd/mm/yy or dd-mm-yy and parse accordingly */ protected static void getDate(String date){ if(date.contains("/")){ getInt("/", date); }else if(date.contains("-")){ getInt("-", date); } } //@@author A0143378Y /* * Parse the date string with the symbol "/" or "-" */ protected static void getInt(String symbol, String date){ String[] details = date.split(symbol); boolean dateIsNumeric = true; //Check that date has all 3 component: day, month and year if(details.length == 3){ dateIsNumeric = checkIfDateIsNumeric(details); } if(dateIsNumeric){ setDDMMYY(details); } } //@@author A0143378Y /* * Returns true if user command is an empty string or contains symbols only */ protected static Boolean isUselessCommand(String input){ if(reduceToIntAndChar(input).length()==0){ return true; }else{ return false; } } //@@author A0143378Y /* * Returns true if end date and time is before start date and time */ protected static Boolean endIsBeforeStart(Calendar start, Calendar end){ return end.before(start); } //@@author A0143378Y /* * Checks if string contains number only * Returns true if it does */ protected static boolean isNumeric(String temp){ try{ Integer.parseInt(temp); }catch(NumberFormatException e){ return false; } return true; } //@@author A0143378Y /* * Prints error message and set setProper as false */ protected static void generalError(){ System.out.println(INVALID_COMMAND); setProper(false); } //@@author A0143378Y protected static String removeAllInt(String name){ return name.replaceAll("[0-9 ]", ""); } //@@author A0143378Y protected static String reduceToInt(String name){ return name.replaceAll("[^0-9]", ""); } //@@author A0143378Y protected static String reduceToIntAndChar(String name){ return name.replaceAll("[^a-zA-Z0-9]", ""); } //@@author A0143378Y protected static String reduceToChar(String name){ return name.replaceAll("[^a-zA-Z]", ""); } //@@author A0143378Y /* * set dates * If day and month are appropriate, returns true */ private static boolean setDateIfContainDDMMYY(int day, int mth, int yr, Calendar setEvent){ int year = 2000 + yr; int month = mth -1; if(!(invalidMonth(month)||invalidDay(day))){ setEvent.set(Calendar.DATE, day); setEvent.set(Calendar.MONTH, month); setEvent.set(Calendar.YEAR, year); return true; }else{ return false; } } //@@author A0143378Y /* * Converts string to integer * If string is empty or contains non-number, print error message. 
*/ private static int convertToInt(String value){ int i = INVALID_INT; try{ i= Integer.parseInt(value); }catch(NumberFormatException e){ System.err.println(INVALID_DATE_TIME+ e.getMessage()); } return i; } //@@author A0143378Y /* * Month is invalid if * Month is less than 0 * Month is greater than 11 */ private static boolean invalidMonth(int month){ return (month<0||month>11); } //@@author A0143378Y /* * returns true is day is 0 or greater than 31 */ private static boolean invalidDay(int day){ return (day<=0||day>31); } //@@author A0143378Y /* * returns true if time is set properly */ private static boolean setTimeIfHHMMSS(int hour, int minute, Calendar setEvent){ setEvent.set(Calendar.HOUR_OF_DAY, hour); setEvent.set(Calendar.MINUTE, minute); if(hour == 23 && minute == 59){ setEvent.set(Calendar.SECOND, 59); }else{ setEvent.set(Calendar.SECOND, 0); } return true; } //@@author A0143378Y /* * returns true if minute is negative or more than 59 */ private static boolean invalidMinute(int minute){ return (minute<0||minute>=60); } //@@author A0143378Y /* * returns true if hour is negative or more than 23 */ private static boolean invalidHour(int hour){ return (hour<0||hour>=24); } //@@author A0143378Y /* * Check that the word is a command word for date */ protected static int isCommandWord(String word){ switch(word){ case STRING_DEADLINE: return DEADLINE; case STRING_BY: return BY; case STRING_UNTIL: return UNTIL; case STRING_TILL: return TILL; case STRING_BEFORE: return BEFORE; case STRING_BETWEEN: return BETWEEN; case STRING_TO: return TO; case STRING_FROM: return FROM; case STRING_ON: return ON; case STRING_AND: return AND; default: return INVALID_STRING; } } //@@author A0143378Y /* * remove additional space between each word in case of typo */ protected static void removeAdditionalSpacesInCommand(){ String[] temp = command.split(" "); String newCommand = ""; for (int i = 0; i < temp.length; i++){ if(temp[i].length() != 0){ newCommand = newCommand + temp[i] + " "; } } setCommand(newCommand.trim()); } //@@author A0143378Y /* * Returns true if the command word is a command for start date */ protected static boolean isStartCommand(String word){ return (isCommandWord(word)>= FROM && isCommandWord(word)<= BETWEEN); } //@@author A0143378Y /* * Returns true if the command word is a command for end date */ protected static boolean isEndCommand(String word){ return (isCommandWord(word)>= BY && isCommandWord(word)<= AND); } //author A0143378Y private static void setDDMMYY(String[] details){ day = Integer.parseInt(details[0]); month = Integer.parseInt(details[1]); year = Integer.parseInt(details[2]); } //@@author A0143378Y private static void initialiseDate(){ day = INT_INVALID; month = INT_INVALID; year = INT_INVALID; } //@@author A0143378Y private static boolean checkIfDateIsNumeric(String[] details){ for(int i = 0; i < 3; i++ ){ if(!isNumeric(details[i])){ return false; } } return true; } }
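A small hypothetical demo (ParserMemoryMainDemo is not part of the original project): the date/time helpers above are protected and static, so a subclass can exercise them directly. setDate() accepts dd/mm/yy or dd-mm-yy with the year offset from 2000, and setTime() expects exactly four digits in HHmm form.

import java.util.Calendar;

import harmony.mastermind.logic.parser.ParserMemoryMain;

public class ParserMemoryMainDemo extends ParserMemoryMain {

    public static void main(String[] args) {
        Calendar when = Calendar.getInstance();
        boolean dateOk = setDate("25/12/16", when); // day 25, December, year 2016
        boolean timeOk = setTime("0930", when);     // 09:30, seconds forced to 0
        System.out.println((dateOk && timeOk) ? when.getTime().toString() : "invalid input");
    }
}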
package com.jetbrains.python.sdk; import com.intellij.execution.configurations.GeneralCommandLine; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; /** * @author yole */ public class IronPythonSdkFlavor extends PythonSdkFlavor { private IronPythonSdkFlavor() { } public static final IronPythonSdkFlavor INSTANCE = new IronPythonSdkFlavor(); @Override public List<String> suggestHomePaths() { List<String> result = new ArrayList<String>(); String root = System.getenv("ProgramFiles(x86)"); if (root == null) { root = System.getenv("ProgramFiles"); } if (root != null) { final File[] dirs = new File(root).listFiles(); /* listFiles() returns null if the path does not exist or cannot be read */ if (dirs != null) { for (File dir : dirs) { if (dir.getName().startsWith("IronPython")) { File ipy = new File(dir, "ipy.exe"); if (ipy.exists()) { result.add(ipy.getPath()); } } } } } return result; } @Override public boolean isValidSdkHome(String path) { final String name = new File(path).getName(); return name.equals("ipy.exe") || name.equals("ipy64.exe"); } @Override public String getVersionString(String sdkHome) { return "IronPython " + getVersionFromOutput(sdkHome, "-V", "\\w+ ([0-9\\.]+).*", true); } @Override public Collection<String> getExtraDebugOptions() { return Collections.singletonList("-X:Frames"); } @Override public void addToPythonPath(GeneralCommandLine cmd, String path) { addToEnv(cmd, path, "IRONPYTHONPATH"); } }
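A tiny hypothetical check (IronPythonSdkFlavorSketch is not part of the IntelliJ sources; it only calls the public members shown above and is only meaningful on a Windows machine with IronPython installed under Program Files): suggestHomePaths() proposes ipy.exe locations and isValidSdkHome() accepts paths whose file name is ipy.exe or ipy64.exe.

import java.util.List;

import com.jetbrains.python.sdk.IronPythonSdkFlavor;

public class IronPythonSdkFlavorSketch {

    public static void main(String[] args) {
        IronPythonSdkFlavor flavor = IronPythonSdkFlavor.INSTANCE;
        List<String> homes = flavor.suggestHomePaths();
        for (String home : homes) {
            System.out.println(home + " -> valid: " + flavor.isValidSdkHome(home));
        }
    }
}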
package analysis.dynamicsim; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import javax.swing.JFrame; import javax.swing.JProgressBar; import javax.xml.stream.XMLStreamException; import main.util.MutableBoolean; import odk.lang.FastMath; import org.apache.commons.math3.exception.DimensionMismatchException; import org.apache.commons.math3.exception.MaxCountExceededException; import org.apache.commons.math3.ode.FirstOrderDifferentialEquations; import org.apache.commons.math3.ode.nonstiff.HighamHall54Integrator; import org.sbml.jsbml.ASTNode; import org.sbml.jsbml.AssignmentRule; import org.sbml.jsbml.RateRule; import analysis.dynamicsim.HierarchicalSimulator.ModelState; import analysis.dynamicsim.HierarchicalSimulator.StringDoublePair; public class SimulatorHybridHierarchical extends HierarchicalSimulator { private static Long initializationTime = new Long(0); private String modelstateID; int numSteps; double relativeError; double absoluteError; DiffEquations[] functions; public SimulatorHybridHierarchical(String SBMLFileName, String outputDirectory, double timeLimit, double maxTimeStep, double minTimeStep, long randomSeed, JProgressBar progress, double printInterval, double stoichAmpValue, JFrame running, String[] interestingSpecies, int numSteps, double relError, double absError, String quantityType) throws IOException, XMLStreamException { super(SBMLFileName, outputDirectory, timeLimit, maxTimeStep, minTimeStep, randomSeed, progress, printInterval, initializationTime, stoichAmpValue, running, interestingSpecies, quantityType); this.numSteps = numSteps; relativeError = relError; absoluteError = absError; modelstateID = "topmodel"; functions = new DiffEquations[numSubmodels + 1]; try { initialize(randomSeed, 1); } catch (IOException e2) { e2.printStackTrace(); } } private void initialize(long randomSeed, int runNumber) throws IOException { int index = 0; setupSpecies(topmodel); setupParameters(topmodel); setupConstraints(topmodel); setupInitialAssignments(topmodel); setupRules(topmodel); setupReactions(topmodel); setupEvents(topmodel); setupFunctionDefinition(topmodel); functions[index++] = new DiffEquations(new VariableState(topmodel)); setupForOutput(randomSeed, runNumber); for(ModelState model : submodels.values()) { setupSpecies(model); setupParameters(model); setupConstraints(model); setupInitialAssignments(model); setupRules(model); setupReactions(model); setupEvents(model); setupFunctionDefinition(model); functions[index++] = new DiffEquations(new VariableState(model)); setupForOutput(randomSeed, runNumber); } setupReplacingSpecies(); setupForOutput(randomSeed, runNumber); bufferedTSDWriter.write("(" + "\"" + "time" + "\""); for (String speciesID : topmodel.speciesIDSet) if(replacements.containsKey(speciesID)) { if(replacementSubModels.get(speciesID).contains("topmodel")) bufferedTSDWriter.write(", \"" + speciesID + "\""); } else { bufferedTSDWriter.write(", \"" + speciesID + "\""); } for (String noConstantParam : topmodel.nonconstantParameterIDSet) if(replacements.containsKey(noConstantParam)) { if(replacementSubModels.get(noConstantParam).contains("topmodel")) bufferedTSDWriter.write(", \"" + noConstantParam + "\""); } else { bufferedTSDWriter.write(", \"" + noConstantParam + "\""); } /* for (String compartment : topmodel.compartmentIDSet) { bufferedTSDWriter.write(", \"" + compartment + "\""); } */ for(ModelState model : submodels.values()) { for (String speciesID : model.speciesIDSet) if(replacements.containsKey(speciesID)) { 
if(!replacementSubModels.get(speciesID).contains(model.ID)) bufferedTSDWriter.write(", \"" + model.ID + "__" + speciesID + "\""); } else { bufferedTSDWriter.write(", \"" + model.ID + "__" + speciesID + "\""); } for (String noConstantParam : model.nonconstantParameterIDSet) if(replacements.containsKey(noConstantParam)) { if(!replacementSubModels.get(noConstantParam).contains(model.ID)) bufferedTSDWriter.write(", \"" + model.ID + "__" + noConstantParam + "\""); } else { bufferedTSDWriter.write(", \"" + model.ID + "__" + noConstantParam + "\""); } /* for (String compartment : model.compartmentIDSet) bufferedTSDWriter.write(", \"" + model.ID + "__" + compartment + "\""); */ } bufferedTSDWriter.write("),\n"); } @Override protected void simulate() { // TODO Auto-generated method stub if (sbmlHasErrorsFlag) return; //SIMULATION LOOP currentTime = 0.0; double printTime = printInterval; HighamHall54Integrator odecalc = new HighamHall54Integrator(0, maxTimeStep, relativeError, absoluteError); double nextEventTime = handleEvents(); while (currentTime < timeLimit && !cancelFlag && constraintFlag) { //EVENT HANDLING //trigger and/or fire events, etc. if (topmodel.noEventsFlag == false) { HashSet<String> affectedReactionSet = fireEvents(topmodel, topmodel.noRuleFlag, topmodel.noConstraintsFlag); //recalculate propensties/groups for affected reactions if (affectedReactionSet.size() > 0) updatePropensities(affectedReactionSet, "topmodel"); } for(ModelState models : submodels.values()) { if (models.noEventsFlag == false) { HashSet<String> affectedReactionSet = fireEvents(models, models.noRuleFlag, models.noConstraintsFlag); //recalculate propensties/groups for affected reactions if (affectedReactionSet.size() > 0) updatePropensities(affectedReactionSet, models.ID); } } //STEP 1: generate random numbers double p1 = randomNumberGenerator.nextDouble(); double p2 = randomNumberGenerator.nextDouble(); //STEP 2: calculate delta_t, the time till the next reaction execution double totalPropensity = getTotalPropensity(); double tau = -FastMath.log(p1)/totalPropensity; double reactionStep = currentTime + tau; if (reactionStep < nextEventTime && reactionStep < currentTime + maxTimeStep) { for(DiffEquations eq : functions) { odecalc.integrate(eq, currentTime, eq.state.values, reactionStep, eq.state.values); updateValuesArray(); } currentTime = reactionStep; // perform reaction } else if (nextEventTime < currentTime + maxTimeStep) { currentTime = nextEventTime; // print } else { currentTime += maxTimeStep; // print } if (currentTime > timeLimit) { currentTime = timeLimit; } while (currentTime > printTime && printTime < timeLimit) { try { printToTSD(printTime); bufferedTSDWriter.write(",\n"); } catch (IOException e) { e.printStackTrace(); } printTime += printInterval; running.setTitle("Progress (" + (int)((currentTime / timeLimit) * 100.0) + "%)"); //update progress bar progress.setValue((int)((currentTime / timeLimit) * 100.0)); } if (currentTime == reactionStep) { //STEP 3: select a reaction String selectedReactionID = selectReaction(p2); //if its length isn't positive then there aren't any reactions if (!selectedReactionID.isEmpty()) { //STEP 4: perform selected reaction and update species counts if(modelstateID.equals("topmodel")) { //if its length isn't positive then there aren't any reactions if (!selectedReactionID.isEmpty()) { performReaction(topmodel, selectedReactionID, topmodel.noRuleFlag, topmodel.noConstraintsFlag); HashSet<String> affectedReactionSet = getAffectedReactionSet(topmodel, selectedReactionID, 
true); //STEP 5: compute affected reactions' new propensities and update total propensity updatePropensities(affectedReactionSet, modelstateID); } } else { //if its length isn't positive then there aren't any reactions if (!selectedReactionID.isEmpty()) { performReaction(submodels.get(modelstateID), selectedReactionID, submodels.get(modelstateID).noRuleFlag, submodels.get(modelstateID).noConstraintsFlag); HashSet<String> affectedReactionSet = getAffectedReactionSet(submodels.get(modelstateID), selectedReactionID, true); //STEP 5: compute affected reactions' new propensities and update total propensity updatePropensities(affectedReactionSet, modelstateID); } } } } updateRules(); //update time for next iteration //currentTime += delta_t; } //end simulation loop if (cancelFlag == false) { //print the final species counts try { printToTSD(printTime); } catch (IOException e) { e.printStackTrace(); } try { bufferedTSDWriter.write(')'); bufferedTSDWriter.flush(); } catch (IOException e1) { e1.printStackTrace(); } } } /** * updates the propensities of the reactions affected by the recently performed reaction * @param affectedReactionSet the set of reactions affected by the recently performed reaction */ private void updatePropensities(HashSet<String> affectedReactionSet, String id) { //loop through the affected reactions and update the propensities for (String affectedReactionID : affectedReactionSet) { if(id.equals("topmodel")) { HashSet<StringDoublePair> reactantStoichiometrySet = topmodel.reactionToReactantStoichiometrySetMap.get(affectedReactionID); updatePropensities(topmodel, affectedReactionSet,affectedReactionID, reactantStoichiometrySet); } else { HashSet<StringDoublePair> reactantStoichiometrySet = submodels.get(id).reactionToReactantStoichiometrySetMap.get(affectedReactionID); updatePropensities(submodels.get(id), affectedReactionSet,affectedReactionID, reactantStoichiometrySet); } } } /** * Helper method */ private void updatePropensities(ModelState model, HashSet<String> affectedReactionSet, String affectedReactionID, HashSet<StringDoublePair> reactantStoichiometrySet) { boolean notEnoughMoleculesFlag = false; //check for enough molecules for the reaction to occur for (StringDoublePair speciesAndStoichiometry : reactantStoichiometrySet) { String speciesID = speciesAndStoichiometry.string; double stoichiometry = speciesAndStoichiometry.doub; //if there aren't enough molecules to satisfy the stoichiometry if (model.variableToValueMap.get(speciesID) < stoichiometry) { notEnoughMoleculesFlag = true; break; } } double newPropensity = 0.0; if (notEnoughMoleculesFlag == false) { newPropensity = evaluateExpressionRecursive(model, model.reactionToFormulaMap.get(affectedReactionID)); //newPropensity = CalculatePropensityIterative(affectedReactionID); } double oldPropensity = model.reactionToPropensityMap.get(affectedReactionID); //add the difference of new v. 
old propensity to the total propensity model.propensity += newPropensity - oldPropensity; //totalPropensity += newPropensity - oldPropensity; model.reactionToPropensityMap.put(affectedReactionID, newPropensity); } private String selectReaction(double r2) { double randomPropensity = r2 * (getTotalPropensity()); double runningTotalReactionsPropensity = 0.0; String selectedReaction = ""; //finds the reaction that the random propensity lies in //it keeps adding the next reaction's propensity to a running total //until the running total is greater than the random propensity for (String currentReaction : topmodel.reactionToPropensityMap.keySet()) { runningTotalReactionsPropensity += topmodel.reactionToPropensityMap.get(currentReaction); if (randomPropensity < runningTotalReactionsPropensity) { selectedReaction = currentReaction; // keep track of submodel index modelstateID = "topmodel"; return selectedReaction; } } for(ModelState models : submodels.values()) { for (String currentReaction : models.reactionToPropensityMap.keySet()) { runningTotalReactionsPropensity += models.reactionToPropensityMap.get(currentReaction); if (randomPropensity < runningTotalReactionsPropensity) { selectedReaction = currentReaction; // keep track of submodel index modelstateID = models.ID; return selectedReaction; } } } return selectedReaction; } @Override protected void cancel() { cancelFlag = true; } @Override protected void clear() { topmodel.clear(); for(int i = 0; i < this.numSubmodels; i++) submodels.clear(); for(String key : replacements.keySet()) replacements.put(key, initReplacementState.get(key)); } @Override protected void setupForNewRun(int newRun) { // TODO Auto-generated method stub } protected double handleEvents() { double nextEventTime = Double.POSITIVE_INFINITY; if (topmodel.noEventsFlag == false) { handleEvents(topmodel, topmodel.noRuleFlag, topmodel.noConstraintsFlag); //step to the next event fire time if it comes before the next time step if (!topmodel.triggeredEventQueue.isEmpty() && topmodel.triggeredEventQueue.peek().fireTime <= nextEventTime) if(topmodel.triggeredEventQueue.peek().fireTime < nextEventTime) nextEventTime = topmodel.triggeredEventQueue.peek().fireTime; } for(ModelState models : submodels.values()) if (models.noEventsFlag == false){ handleEvents(models, models.noRuleFlag, models.noConstraintsFlag); //step to the next event fire time if it comes before the next time step if (!models.triggeredEventQueue.isEmpty() && models.triggeredEventQueue.peek().fireTime <= nextEventTime) if(models.triggeredEventQueue.peek().fireTime < nextEventTime) nextEventTime = models.triggeredEventQueue.peek().fireTime; } return nextEventTime; } private class DiffEquations implements FirstOrderDifferentialEquations { VariableState state; public DiffEquations(VariableState state) { this.state = state; } @Override public void computeDerivatives(double t, double[] y, double[] currValueChanges) throws MaxCountExceededException, DimensionMismatchException { HashSet<AssignmentRule> affectedAssignmentRuleSet = new HashSet<AssignmentRule>(); for (int i = 0; i < y.length; i++) state.modelstate.setvariableToValueMap(state.indexToVariableMap.get(i), y[i]); //calculate the current variable values //based on the ODE system for (int i = 0; i < currValueChanges.length; i++) { String currentVar = state.indexToVariableMap.get(i); if ((state.modelstate.speciesIDSet.contains(currentVar) && state.modelstate.speciesToIsBoundaryConditionMap.get(currentVar) == false) && 
(state.modelstate.variableToValueMap.contains(currentVar)) && state.modelstate.variableToIsConstantMap.get(currentVar) == false) { currValueChanges[i] = evaluateExpressionRecursive(state.modelstate, state.dvariablesdtime[i]); //if (currValueChanges[i]!=0) { // System.out.println(indexToVariableMap.get(i) + "= " + dvariablesdtime[i].toFormula() + "=" + currValueChanges[i]); } else currValueChanges[i] = 0; if (state.modelstate.variableToIsInAssignmentRuleMap != null && state.modelstate.variableToIsInAssignmentRuleMap.containsKey(currentVar) && state.modelstate.variableToValueMap.contains(currentVar) && state.modelstate.variableToIsInAssignmentRuleMap.get(currentVar) == true) affectedAssignmentRuleSet.addAll(state.modelstate.variableToAffectedAssignmentRuleSetMap.get(currentVar)); // if (variableToIsInConstraintMap.get(speciesID) == true) // affectedConstraintSet.addAll(variableToAffectedConstraintSetMap.get(speciesID)); } //updatePropensities(performRateRules(topmodel, currentTime), "topmodel"); performRateRules(state.modelstate, state.variableToIndexMap, currValueChanges); //if assignment rules are performed, these changes need to be reflected in the currValueChanges //that get passed back if (affectedAssignmentRuleSet.size() > 0) { HashSet<String> affectedVariables = performAssignmentRules(state.modelstate, affectedAssignmentRuleSet); for (String affectedVariable : affectedVariables) { int index = state.variableToIndexMap.get(affectedVariable); currValueChanges[index] = state.modelstate.getVariableToValue(affectedVariable) - y[index]; } } } @Override public int getDimension() { return state.values.length; } /** * performs every rate rule using the current time step * * @param delta_t * @return */ protected HashSet<String> performRateRules(ModelState modelstate, HashMap<String, Integer> variableToIndexMap, double[] currValueChanges) { HashSet<String> affectedVariables = new HashSet<String>(); for (RateRule rateRule : modelstate.rateRulesList) { String variable = rateRule.getVariable(); //update the species count (but only if the species isn't constant) (bound cond is fine) if (modelstate.variableToIsConstantMap.containsKey(variable) && modelstate.variableToIsConstantMap.get(variable) == false) { if (modelstate.speciesToHasOnlySubstanceUnitsMap.containsKey(variable) && modelstate.speciesToHasOnlySubstanceUnitsMap.get(variable) == false) { if(!variableToIndexMap.containsKey(variable)) continue; int index = variableToIndexMap.get(variable); if(index > currValueChanges.length) continue; double value = (evaluateExpressionRecursive(modelstate, rateRule.getMath()) * modelstate.getVariableToValue(modelstate.speciesToCompartmentNameMap.get(variable))); currValueChanges[index] = value; //modelstate.setvariableToValueMap(variable, value); } else { if(!variableToIndexMap.containsKey(variable)) continue; int index = variableToIndexMap.get(variable); if(index > currValueChanges.length) continue; double value = evaluateExpressionRecursive(modelstate, rateRule.getMath()); currValueChanges[index] = value; //modelstate.setvariableToValueMap(variable, value); } affectedVariables.add(variable); } } return affectedVariables; } } private void updateValuesArray() { int index = 0; //convert variableToValueMap into two arrays //and create a hashmap to find indices for(DiffEquations eq : functions) { for (String variable : eq.state.modelstate.variableToValueMap.keySet()) { eq.state.values[index] = eq.state.modelstate.getVariableToValue(variable); ++index; } index = 0; } } protected class VariableState { ModelState 
modelstate; String[] variables; double[] values; ASTNode[] dvariablesdtime; HashMap<String, Integer> variableToIndexMap; HashMap<Integer, String> indexToVariableMap; MutableBoolean eventsFlag = new MutableBoolean(false); MutableBoolean rulesFlag = new MutableBoolean(false); MutableBoolean constraintsFlag = new MutableBoolean(false); protected VariableState(ModelState modelstate) { this.modelstate = modelstate; variables = new String[modelstate.variableToValueMap.size()]; values = new double[modelstate.variableToValueMap.size()]; dvariablesdtime = new ASTNode[modelstate.variableToValueMap.size()]; variableToIndexMap = new HashMap<String, Integer>(modelstate.variableToValueMap.size()); indexToVariableMap = new HashMap<Integer, String>(modelstate.variableToValueMap.size()); int index = 0; //convert variableToValueMap into two arrays //and create a hashmap to find indices for (String variable : modelstate.variableToValueMap.keySet()) { variables[index] = variable; values[index] = modelstate.getVariableToValue(variable); variableToIndexMap.put(variable, index); dvariablesdtime[index] = new ASTNode(); dvariablesdtime[index].setValue(0); indexToVariableMap.put(index, variable); ++index; } //create system of ODEs for the change in variables for (String reaction : modelstate.reactionToFormulaMap.keySet()) { ASTNode formula = modelstate.reactionToFormulaMap.get(reaction); //System.out.println("HERE: " + formula.toFormula()); HashSet<StringDoublePair> reactantAndStoichiometrySet = modelstate.reactionToReactantStoichiometrySetMap.get(reaction); HashSet<StringDoublePair> speciesAndStoichiometrySet = modelstate.reactionToSpeciesAndStoichiometrySetMap.get(reaction); //loop through reactants for (StringDoublePair reactantAndStoichiometry : reactantAndStoichiometrySet) { String reactant = reactantAndStoichiometry.string; double stoichiometry = reactantAndStoichiometry.doub; int varIndex = variableToIndexMap.get(reactant); ASTNode stoichNode = new ASTNode(); stoichNode.setValue(-1 * stoichiometry); dvariablesdtime[varIndex] = ASTNode.sum(dvariablesdtime[varIndex], ASTNode.times(formula,stoichNode)); } //loop through products for (StringDoublePair speciesAndStoichiometry : speciesAndStoichiometrySet) { String species = speciesAndStoichiometry.string; double stoichiometry = speciesAndStoichiometry.doub; //if it's a product its stoichiometry will be positive //(and if it's a reactant it'll be negative) if (stoichiometry > 0) { int varIndex = variableToIndexMap.get(species); ASTNode stoichNode = new ASTNode(); stoichNode.setValue(stoichiometry); dvariablesdtime[varIndex] = ASTNode.sum(dvariablesdtime[varIndex], ASTNode.times(formula,stoichNode)); } } } } } }
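/*
 * Illustrative sketch (not from the original file): the simulate() loop above couples ODE
 * integration with a stochastic step that is essentially Gillespie's direct method -- it draws
 * tau = -ln(p1)/a0 from the total propensity a0 and then picks the firing reaction by walking a
 * cumulative sum of per-reaction propensities until it exceeds p2 * a0, exactly as
 * selectReaction(double) does. The reaction IDs and propensity values below are invented for
 * the example.
 */
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Random;

class DirectMethodSelectionSketch {

    /** Returns the reaction whose cumulative-propensity interval contains r2 * a0, or null if a0 is 0. */
    static String selectReaction(Map<String, Double> propensities, double r2) {
        double total = propensities.values().stream().mapToDouble(Double::doubleValue).sum();
        double target = r2 * total;
        double running = 0.0;
        for (Map.Entry<String, Double> entry : propensities.entrySet()) {
            running += entry.getValue();
            if (target < running) {
                return entry.getKey();
            }
        }
        return null;
    }

    public static void main(String[] args) {
        Map<String, Double> propensities = new LinkedHashMap<>();
        propensities.put("R1", 0.5);
        propensities.put("R2", 2.0);
        propensities.put("R3", 0.25);

        Random rng = new Random(42);
        double p1 = rng.nextDouble();
        double p2 = rng.nextDouble();

        double a0 = propensities.values().stream().mapToDouble(Double::doubleValue).sum();
        double tau = -Math.log(p1) / a0; // waiting time until the next stochastic event
        System.out.println("tau = " + tau + ", firing reaction = " + selectReaction(propensities, p2));
    }
}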
package org.broadinstitute.sting.alignment.bwa.packing;

import org.broadinstitute.sting.utils.StingException;

import java.io.*;
import java.nio.ByteOrder;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

/**
 * Reads a packed version of the input stream.
 *
 * @author mhanna
 * @version 0.1
 */
public class BasePackedInputStream<T> {
    /**
     * Type of object to unpack.
     */
    private final Class<T> type;

    /**
     * Ultimate source for packed bases.
     */
    private final FileInputStream targetInputStream;

    /**
     * Channel source for packed bases.
     */
    private final FileChannel targetInputChannel;

    /**
     * Byte order in which the packed words are stored.
     */
    private final ByteOrder byteOrder;

    /**
     * How many bases are in a given packed word.
     */
    private final int basesPerPackedWord = PackUtils.bitsInType(Integer.class) / PackUtils.BITS_PER_BASE;

    /**
     * How many bytes in an integer?
     */
    private final int bytesPerInteger = PackUtils.bitsInType(Integer.class) / PackUtils.BITS_PER_BYTE;

    public BasePackedInputStream(Class<T> type, File inputFile, ByteOrder byteOrder) throws FileNotFoundException {
        this(type, new FileInputStream(inputFile), byteOrder);
    }

    public BasePackedInputStream(Class<T> type, FileInputStream inputStream, ByteOrder byteOrder) {
        if (type != Integer.class)
            throw new StingException("Only bases packed into 32-bit words are currently supported by this input stream. Type specified: " + type.getName());
        this.type = type;
        this.targetInputStream = inputStream;
        this.targetInputChannel = inputStream.getChannel();
        this.byteOrder = byteOrder;
    }

    /**
     * Read the entire contents of the input stream.
     * @param bwt array into which bases should be read.
     * @throws IOException if an I/O error occurs.
     */
    public void read(byte[] bwt) throws IOException {
        read(bwt, 0, bwt.length);
    }

    /**
     * Read the next <code>length</code> bases into the bwt array, starting at the given offset.
     * @param bwt array holding the given data.
     * @param offset target position in the bases array into which bytes should be written.
     * @param length number of bases to read from the stream.
     * @throws IOException if an I/O error occurs.
     */
    public void read(byte[] bwt, int offset, int length) throws IOException {
        int bufferWidth = ((bwt.length + basesPerPackedWord - 1) / basesPerPackedWord) * bytesPerInteger;
        ByteBuffer buffer = ByteBuffer.allocate(bufferWidth).order(byteOrder);
        targetInputChannel.read(buffer);
        targetInputChannel.position(targetInputChannel.position() + buffer.remaining());
        buffer.flip();

        int packedWord = 0;
        int i = 0;
        while (i < length) {
            if (i % basesPerPackedWord == 0) packedWord = buffer.getInt();
            int position = basesPerPackedWord - i % basesPerPackedWord - 1;
            bwt[offset + i++] = PackUtils.unpackBase((byte) ((packedWord >> position * PackUtils.BITS_PER_BASE) & 0x3));
        }
    }
}
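/*
 * Illustrative sketch (not from the original file): BasePackedInputStream stores 16 bases per
 * 32-bit word at 2 bits per base, with the first base in the highest-order bits -- that is what
 * the "position = basesPerPackedWord - i % basesPerPackedWord - 1" arithmetic in read() walks.
 * The class below decodes a single packed word the same way; the 2-bit-to-base mapping
 * (0=A, 1=C, 2=G, 3=T) is an assumption for the example, since PackUtils.unpackBase() is not
 * shown above.
 */
class PackedWordSketch {
    private static final int BITS_PER_BASE = 2;
    private static final int BASES_PER_WORD = 32 / BITS_PER_BASE; // 16 bases per 32-bit word

    // Assumed 2-bit encoding for the sake of the example: 0=A, 1=C, 2=G, 3=T.
    static char unpackBase(int twoBits) {
        switch (twoBits) {
            case 0: return 'A';
            case 1: return 'C';
            case 2: return 'G';
            case 3: return 'T';
            default: throw new IllegalArgumentException("not a 2-bit code: " + twoBits);
        }
    }

    static char[] unpackWord(int packedWord) {
        char[] bases = new char[BASES_PER_WORD];
        for (int i = 0; i < BASES_PER_WORD; i++) {
            // the first base sits in the highest-order two bits, as in read() above
            int position = BASES_PER_WORD - i - 1;
            bases[i] = unpackBase((packedWord >> position * BITS_PER_BASE) & 0x3);
        }
        return bases;
    }

    public static void main(String[] args) {
        int word = 0b00011011_00011011_00011011_00011011; // each byte packs the codes 0,1,2,3
        System.out.println(new String(unpackWord(word)));  // prints ACGTACGTACGTACGT with the assumed mapping
    }
}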
package info.u_team.u_team_core.gui;

import info.u_team.u_team_core.container.*;
import net.minecraft.client.gui.screen.inventory.ContainerScreen;
import net.minecraft.entity.player.PlayerInventory;
import net.minecraft.inventory.container.Container;
import net.minecraft.util.text.ITextComponent;
import net.minecraftforge.api.distmarker.*;

@OnlyIn(Dist.CLIENT)
public abstract class FluidContainerScreen<T extends Container> extends ContainerScreen<T> {

    public FluidContainerScreen(T screenContainer, PlayerInventory inv, ITextComponent titleIn) {
        super(screenContainer, inv, titleIn);
    }

    @Override
    protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
        if (container instanceof FluidContainer) {
            final FluidContainer fluidContainer = (FluidContainer) container;
            for (int index = 0; index < fluidContainer.fluidSlots.size(); index++) {
                drawFluidSlot(fluidContainer.fluidSlots.get(index));
            }
        }
    }

    private void drawFluidSlot(FluidSlot slot) {
        // final int x = slot.getX();
        // final int y = slot.getY();
        // final FluidStack stack = slot.getStack();
        // TODO
    }
}
package org.biojava.bio.gui.sequence;

import java.awt.Graphics2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;

import org.biojava.bio.symbol.Location;
import org.biojava.bio.symbol.RangeLocation;

/**
 * @author Matthew Pocock
 * @author Kalle N&auml;slund
 * @since 1.4
 */
public final class GUITools {
    private GUITools() {}

    public static Location getVisibleRange(SequenceRenderContext src, Graphics2D g2) {
        Rectangle2D clip = g2.getClipBounds();

        int min = Math.max(
                src.getRange().getMin(),
                src.graphicsToSequence(new Point2D.Double(clip.getMinX(), clip.getMinY())) - 1);
        int max = Math.min(
                src.getRange().getMax(),
                src.graphicsToSequence(new Point2D.Double(clip.getMaxX(), clip.getMaxY())) + 1);

        // this happens when the clip region doesn't overlap with the SymbolList range
        if (min > max) {
            return Location.empty;
        } else {
            return new RangeLocation(min, max);
        }
    }

    public static Rectangle2D createOuterBounds(CircularRendererContext crc, double depth) {
        // do we need some extra data in crc to deal with origins not at 0?
        double outer = crc.getRadius() + depth;
        return new Rectangle2D.Double(-outer, -outer, 2.0 * outer, 2.0 * outer);
    }

    public static Rectangle2D createInnerBounds(CircularRendererContext crc) {
        // do we need some extra data in crc to deal with origins not at 0?
        double outer = crc.getRadius();
        return new Rectangle2D.Double(-outer, -outer, 2.0 * outer, 2.0 * outer);
    }
}
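/*
 * Illustrative sketch (not from the original file): getVisibleRange() above clamps the sequence
 * positions that correspond to the corners of the Graphics2D clip (padded by one position on
 * each side) onto the renderer's own range, and falls back to Location.empty when the two do
 * not overlap. The class below applies the same clamping to plain integers; the numbers are
 * made up.
 */
class VisibleRangeSketch {
    /** Clamp the padded [clipMin-1, clipMax+1] onto [rangeMin, rangeMax]; an empty array means "no overlap". */
    static int[] clamp(int rangeMin, int rangeMax, int clipMin, int clipMax) {
        int min = Math.max(rangeMin, clipMin - 1);
        int max = Math.min(rangeMax, clipMax + 1);
        return (min > max) ? new int[0] : new int[] { min, max };
    }

    public static void main(String[] args) {
        // sequence spans 1..1000, clip maps to sequence positions 120..180 -> visible 119..181
        System.out.println(java.util.Arrays.toString(clamp(1, 1000, 120, 180)));
        // clip lies entirely outside the sequence range -> empty
        System.out.println(java.util.Arrays.toString(clamp(1, 1000, -50, -10)));
    }
}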
package org.javarosa.xform.util; import java.util.Vector; import org.javarosa.core.model.Constants; import org.javarosa.core.model.DataBinding; import org.javarosa.core.model.IFormElement; import org.javarosa.core.model.QuestionDef; import org.javarosa.core.model.data.DateData; import org.javarosa.core.model.data.IAnswerData; import org.javarosa.core.model.data.SelectMultiData; import org.javarosa.core.model.data.SelectOneData; import org.javarosa.core.model.data.Selection; import org.javarosa.core.model.data.StringData; import org.javarosa.core.model.utils.DateUtils; import org.javarosa.core.util.Map; import org.javarosa.xform.parse.XFormParser; import org.kxml2.kdom.Element; /** * The XFormAnswerDataParser is responsible for taking XForms elements and * parsing them into a specific type of IAnswerData. * * @author Clayton Sims * */ public class XFormAnswerDataParser { Map parsers; public static IAnswerData getAnswerData(Vector formElements, DataBinding binding, Element node) { // TODO: This should be a set of Handlers, not a switch String value = XFormParser.getXMLText(node, false); if (value == null) return null; int dataType = 0; if (binding == null) { dataType = ((QuestionDef) formElements.elementAt(0)).getDataType(); } else { dataType = binding.getDataType(); } switch (dataType) { case Constants.DATATYPE_DATE: return value.trim().length() == 0 ? null : new DateData(DateUtils .getDateFromString(value)); case Constants.DATATYPE_DATE_TIME: // We need to get datetime here, not date return value.trim().length() == 0 ? null : new DateData(DateUtils .getDateTimeFromString(value)); case Constants.DATATYPE_INTEGER: // return value.trim().length() == 0 ? null : new // IntegerData(Integer.parseInt(value)); return new StringData(value); case Constants.DATATYPE_DECIMAL: // BWD 6.Dec.2008. copying this one from the integer example return new StringData(value); case Constants.DATATYPE_TEXT: if (formElements.isEmpty()) { return new StringData(value); } else { QuestionDef questionDef = (QuestionDef) formElements.elementAt(0); // TODO (JMT) this cast is not good if (questionDef.getSelectItemIDs() == null) { return new StringData(value); } else { int controlType = questionDef.getControlType(); switch (controlType) { case Constants.CONTROL_SELECT_ONE: Selection selection = getSelection(value, formElements); return new SelectOneData(selection); case Constants.CONTROL_SELECT_MULTI: String[] splitValues = split(value, XFormAnswerDataSerializer.DELIMITER); Vector selections = new Vector(); for (int i = 0; i < splitValues.length; i++) { selection = getSelection(splitValues[i], formElements); if(selection != null){ selections.addElement(selection); } } return new SelectMultiData(selections); } } } case Constants.DATATYPE_TIME: // Come up with a parser for this. 
return null; } return null; } private static Selection getSelection(String value, Vector formElements) { IFormElement element; QuestionDef questionDef; for (int i = 0; i < formElements.size(); i++) { element = (IFormElement) formElements.elementAt(i); if(element instanceof QuestionDef){ questionDef = (QuestionDef) element; questionDef.localizeSelectMap(null); int index = questionDef.getSelectedItemIndex(value); if(index != -1){ return new Selection(index, questionDef); } } } return null; } // TODO (JMT) this methods is an util method, put it in a util class public static String[] split(String original, String delimiter) { Vector nodes = new Vector(); // Parse nodes into vector int index = original.indexOf(delimiter); while (index >= 0) { nodes.addElement(original.substring(0, index)); original = original.substring(index + delimiter.length()); index = original.indexOf(delimiter); } // Get the last node nodes.addElement(original); // Create splitted string array String[] result = new String[nodes.size()]; if (nodes.size() > 0) { for (int loop = 0; loop < nodes.size(); loop++) { result[loop] = (String) nodes.elementAt(loop); } } return result; } }
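/*
 * Illustrative sketch (not from the original file): split() above is the helper used to break a
 * delimited select-multiple answer into individual values before each one is resolved to a
 * Selection. A quick usage check; the literal " " delimiter is an assumption standing in for
 * XFormAnswerDataSerializer.DELIMITER, whose value is not shown here.
 */
import org.javarosa.xform.util.XFormAnswerDataParser;

class SplitSketch {
    public static void main(String[] args) {
        String[] parts = XFormAnswerDataParser.split("yes maybe no", " ");
        for (String part : parts) {
            System.out.println(part); // prints "yes", "maybe", "no" on separate lines
        }
    }
}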
package org.gbif.nameparser; import java.io.BufferedReader; import java.io.InputStreamReader; import java.io.Reader; import java.io.UnsupportedEncodingException; import com.google.common.collect.Iterables; import org.apache.commons.io.LineIterator; import org.gbif.nameparser.api.*; import org.junit.AfterClass; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.gbif.nameparser.api.NamePart.INFRASPECIFIC; import static org.gbif.nameparser.api.NamePart.SPECIFIC; import static org.gbif.nameparser.api.NameType.*; import static org.gbif.nameparser.api.NomCode.*; import static org.gbif.nameparser.api.Rank.*; import static org.junit.Assert.*; public abstract class NameParserTest { private static Logger LOG = LoggerFactory.getLogger(NameParserTest.class); private final NameParser parser; protected NameParserTest(NameParser parser) { this.parser = parser; } @Test public void nomRefs() throws Exception { assertName("Passiflora plumosa Feuillet & Cremers, Proceedings of the Koninklijke Nederlandse Akademie van Wetenschappen, Series C: Biological and Medical Sciences 87(3): 381, f. 2. 1984. Fig. 2I, J", "Passiflora plumosa") .species("Passiflora", "plumosa") .combAuthors(null, "Feuillet", "Cremers") .partial(", Proceedings of the Koninklijke Nederlandse Akademie van Wetenschappen, Series C: Biological and Medical Sciences 87(3): 381, f. 2. 1984. Fig. 2I, J") .warning(Warnings.NOMENCLATURAL_REFERENCE) .nothingElse(); assertName("Passiflora jussieui Feuillet, Journal of the Botanical Research Institute of Texas 4(2): 611, f. 1. 2010. Figs 2E, F, 3E, F", "Passiflora jussieui") .species("Passiflora", "jussieui") .combAuthors(null, "Feuillet") .partial(", Journal of the Botanical Research Institute of Texas 4(2): 611, f. 1. 2010. Figs 2E, F, 3E, F") .warning(Warnings.NOMENCLATURAL_REFERENCE) .nothingElse(); assertName("Passiflora eglandulosa J.M. MacDougal. Annals of the Missouri Botanical Garden 75: 1658-1662. figs 1, 2B, and 3. 1988. Figs 36-37", "Passiflora eglandulosa") .species("Passiflora", "eglandulosa") .combAuthors(null, "J.M.MacDougal") .partial(". Annals of the Missouri Botanical Garden 75: 1658-1662. figs 1, 2B, and 3. 1988. Figs 36-37") .warning(Warnings.NOMENCLATURAL_REFERENCE) .nothingElse(); assertName("Passiflora eglandulosa J.M. MacDougal. Lingua franca de Missouri Botanical Garden 75: 1658-1662. figs 1, 2B, and 3. 1988. Figs 36-37", "Passiflora eglandulosa") .species("Passiflora", "eglandulosa") .combAuthors(null, "J.M.MacDougal") .partial(". Lingua franca de Missouri Botanical Garden 75: 1658-1662. figs 1, 2B, and 3. 1988. Figs 36-37") .warning(Warnings.NOMENCLATURAL_REFERENCE) .nothingElse(); } @Test public void blacklisted() throws Exception { assertName("Passiflora possible Müller", "Passiflora possible") .species("Passiflora", "possible") .combAuthors(null, "Müller") .doubtful() .warning(Warnings.BLACKLISTED_EPITHET) .nothingElse(); // undetected nom rel // make sure we blacklist the shit epithet, as we have received such data in the past assertName("Passiflora eglandulosa J.M. MacDougal. Lingua shit de Missouri Botanical Garden 75. figs 1, 2B, and 3. 1988. 
Figs 36-37", "Passiflora eglandulosa shit") .infraSpecies("Passiflora", "eglandulosa", INFRASPECIFIC_NAME, "shit") .combAuthors(null, "de Missouri Botanical Garden") .doubtful() .partial("75.figs 1,2B,&3.1988.Figs 36-37") .warning(Warnings.BLACKLISTED_EPITHET) .nothingElse(); } @Test public void species() throws Exception { assertName("Diodia teres Walter", "Diodia teres") .species("Diodia", "teres") .combAuthors(null, "Walter") .nothingElse(); assertName("Dysponetus bulbosus Hartmann-Schroder 1982", "Dysponetus bulbosus") .species("Dysponetus", "bulbosus") .combAuthors("1982", "Hartmann-Schroder") .code(ZOOLOGICAL) .nothingElse(); assertName("Zophosis persis (Chatanay 1914)", "Zophosis persis") .species("Zophosis", "persis") .basAuthors("1914", "Chatanay") .code(ZOOLOGICAL) .nothingElse(); assertName("Abies alba Mill.", "Abies alba") .species("Abies", "alba") .combAuthors(null, "Mill.") .nothingElse(); assertName("Alstonia vieillardii Van Heurck & Müll.Arg.", "Alstonia vieillardii") .species("Alstonia", "vieillardii") .combAuthors(null, "Van Heurck", "Müll.Arg.") .nothingElse(); assertName("Angiopteris d'urvilleana de Vriese", "Angiopteris d'urvilleana") .species("Angiopteris", "d'urvilleana") .combAuthors(null, "de Vriese") .nothingElse(); assertName("Agrostis hyemalis (Walter) Britton, Sterns, & Poggenb.", "Agrostis hyemalis") .species("Agrostis", "hyemalis") .combAuthors(null, "Britton", "Sterns", "Poggenb.") .basAuthors(null, "Walter") .code(BOTANICAL) .nothingElse(); } @Test public void specialEpithets() throws Exception { assertName("Gracillaria v-flava Haworth, 1828", "Gracillaria v-flava") .species("Gracillaria", "v-flava") .combAuthors("1828", "Haworth") .code(ZOOLOGICAL) .nothingElse(); } @Test public void capitalAuthors() throws Exception { assertName("Anniella nigra FISCHER 1885", "Anniella nigra") .species("Anniella", "nigra") .combAuthors("1885", "Fischer") .code(ZOOLOGICAL) .nothingElse(); } @Test public void infraSpecies() throws Exception { assertName("Poa pratensis subsp. anceps (Gaudin) Dumort., 1824", Rank.SPECIES, "Poa pratensis subsp. anceps") .infraSpecies("Poa", "pratensis", Rank.SUBSPECIES, "anceps") .basAuthors(null, "Gaudin") .combAuthors("1824", "Dumort.") .code(BOTANICAL) .warning(Warnings.SUBSPECIES_ASSIGNED) .nothingElse(); assertName("Abies alba ssp. alpina Mill.", "Abies alba alpina") .infraSpecies("Abies", "alba", SUBSPECIES, "alpina") .combAuthors(null, "Mill.") .nothingElse(); assertName("Festuca ovina L. subvar. gracilis Hackel", "Festuca ovina subvar. gracilis") .infraSpecies("Festuca", "ovina", SUBVARIETY, "gracilis") .combAuthors(null, "Hackel") .nothingElse(); assertName("Pseudomonas syringae pv. aceris (Ark, 1939) Young, Dye & Wilkie, 1978", "Pseudomonas syringae pv. aceris") .infraSpecies("Pseudomonas", "syringae", PATHOVAR, "aceris") .combAuthors("1978", "Young", "Dye", "Wilkie") .basAuthors("1939", "Ark") .code(BACTERIAL) .nothingElse(); assertName("Agaricus compactus sarcocephalus (Fr.) Fr. ", "Agaricus compactus sarcocephalus") .infraSpecies("Agaricus", "compactus", INFRASPECIFIC_NAME, "sarcocephalus") .combAuthors(null, "Fr.") .basAuthors(null, "Fr.") .code(BOTANICAL) .nothingElse(); assertName("Baccharis microphylla Kunth var. rhomboidea Wedd. ex Sch. Bip. (nom. nud.)", "Baccharis microphylla var. rhomboidea") .infraSpecies("Baccharis", "microphylla", VARIETY, "rhomboidea") .combAuthors(null, "Sch.Bip.") .combExAuthors("Wedd.") .nomNote("nom.nud.") .nothingElse(); assertName("Achillea millefolium subsp. pallidotegula B. Boivin var. 
pallidotegula", "Achillea millefolium var. pallidotegula") .infraSpecies("Achillea", "millefolium", Rank.VARIETY, "pallidotegula") .warning("Intermediate classification removed: subsp.pallidotegula B.Boivin ") .nothingElse(); assertName("Achillea millefolium var. pallidotegula", Rank.INFRASPECIFIC_NAME, "Achillea millefolium var. pallidotegula") .infraSpecies("Achillea", "millefolium", Rank.VARIETY, "pallidotegula") .nothingElse(); } @Test public void exAuthors() throws Exception { assertName("Acacia truncata (Burm. f.) hort. ex Hoffmanns.", "Acacia truncata") .species("Acacia", "truncata") .basAuthors(null, "Burm.f.") .combExAuthors("hort.") .combAuthors(null, "Hoffmanns.") .code(BOTANICAL) .nothingElse(); // In botany (99% of ex author use) the ex author comes first, see https://en.wikipedia.org/wiki/Author_citation_(botany)#Usage_of_the_term_.22ex.22 assertName("Gymnocalycium eurypleurumn Plesn¡k ex F.Ritter", "Gymnocalycium eurypleurumn") .species("Gymnocalycium", "eurypleurumn") .combAuthors(null, "F.Ritter") .combExAuthors("Plesnik") .doubtful() .warning(Warnings.UNUSUAL_CHARACTERS) .nothingElse(); assertName("Abutilon bastardioides Baker f. ex Rose", "Abutilon bastardioides") .species("Abutilon", "bastardioides") .combAuthors(null, "Rose") .combExAuthors("Baker f.") .nothingElse(); assertName("Baccharis microphylla Kunth var. rhomboidea Wedd. ex Sch. Bip. (nom. nud.)", "Baccharis microphylla var. rhomboidea") .infraSpecies("Baccharis", "microphylla", VARIETY, "rhomboidea") .combAuthors(null, "Sch.Bip.") .combExAuthors("Wedd.") .nomNote("nom.nud.") .nothingElse(); assertName("Abies brevifolia hort. ex Dallim.", "Abies brevifolia") .species("Abies", "brevifolia") .combExAuthors("hort.") .combAuthors(null, "Dallim.") .nothingElse(); assertName("Abies brevifolia cv. ex Dallim.", "Abies brevifolia") .species("Abies", "brevifolia") .combExAuthors("hort.") .combAuthors(null, "Dallim.") .nothingElse(); assertName("Abutilon ×hybridum cv. ex Voss", "Abutilon × hybridum") .species("Abutilon", "hybridum") .notho(SPECIFIC) .combExAuthors("hort.") .combAuthors(null, "Voss") .nothingElse(); // "Abutilon bastardioides Baker f. ex Rose" // "Aukuba ex Koehne 'Thunb' " // "Crepinella subgen. Marchal ex Oliver " // "Echinocereus sect. Triglochidiata ex Bravo" // "Hadrolaelia sect. Sophronitis ex Chiron & V.P.Castro" } @Test public void fourPartedNames() throws Exception { assertName("Poa pratensis kewensis primula (L.) Rouy, 1913", "Poa pratensis primula") .infraSpecies("Poa", "pratensis", INFRASUBSPECIFIC_NAME, "primula") .combAuthors("1913", "Rouy") .basAuthors(null, "L.") .code(ZOOLOGICAL) .nothingElse(); assertName("Bombus sichelii alticola latofasciatus", "Bombus sichelii latofasciatus") .infraSpecies("Bombus", "sichelii", INFRASUBSPECIFIC_NAME, "latofasciatus") .nothingElse(); assertName("Acipenser gueldenstaedti colchicus natio danubicus Movchan, 1967", "Acipenser gueldenstaedti natio danubicus") .infraSpecies("Acipenser", "gueldenstaedti", NATIO, "danubicus") .combAuthors("1967", "Movchan") .code(ZOOLOGICAL) .nothingElse(); assertName("Cymbella cistula var. sinus regis", "Cymbella cistula var. 
sinus") .infraSpecies("Cymbella", "cistula", VARIETY, "sinus") .partial("regis") .nothingElse(); } @Test public void monomial() throws Exception { assertName("Animalia", "Animalia") .monomial("Animalia") .nothingElse(); assertName("Polychaeta", "Polychaeta") .monomial("Polychaeta") .nothingElse(); assertName("Chrysopetalidae", "Chrysopetalidae") .monomial("Chrysopetalidae") .nothingElse(); assertName("Chrysopetalidae", null, ZOOLOGICAL, "Chrysopetalidae") .monomial("Chrysopetalidae", Rank.FAMILY) .code(ZOOLOGICAL) .nothingElse(); assertName("Acripeza Guérin-Ménéville 1838", "Acripeza") .monomial("Acripeza") .combAuthors("1838", "Guérin-Ménéville") .code(NomCode.ZOOLOGICAL) .nothingElse(); } @Test public void inReferences() throws Exception { assertName("Xolisma turquini Small apud Britton & Wilson", "Xolisma turquini") .species("Xolisma", "turquini") .combAuthors(null, "Small") .nomNote("apud Britton & Wilson") .nothingElse(); assertName("Negundo aceroides var. violaceum G.Kirchn. in Petzold & G.Kirchn.", "Negundo aceroides var. violaceum") .infraSpecies("Negundo", "aceroides", Rank.VARIETY, "violaceum") .combAuthors(null, "G.Kirchn.") .nomNote("in Petzold & G.Kirchn.") .nothingElse(); assertName("Abies denheyeri Eghbalian, Khanjani and Ueckermann in Eghbalian, Khanjani & Ueckermann, 2017", "Abies denheyeri") .species("Abies", "denheyeri") .combAuthors("2017", "Eghbalian", "Khanjani", "Ueckermann") .nomNote("in Eghbalian, Khanjani & Ueckermann") .code(ZOOLOGICAL) .nothingElse(); assertName("Mica Budde-Lund in Voeltzkow, 1908", "Mica") .monomial("Mica") .combAuthors("1908", "Budde-Lund") .nomNote("in Voeltzkow") .code(NomCode.ZOOLOGICAL) .nothingElse(); } @Test public void supraGenericIPNI() throws Exception { assertName("Poaceae subtrib. Scolochloinae Soreng", "Scolochloinae") .monomial("Scolochloinae", SUBTRIBE) .combAuthors(null, "Soreng") .nothingElse(); assertName("subtrib. Scolochloinae Soreng", "Scolochloinae") .monomial("Scolochloinae", SUBTRIBE) .combAuthors(null, "Soreng") .nothingElse(); } @Test public void infraGeneric() throws Exception { assertName("Pinus suprasect. Taeda", "Pinus supersect. Taeda") .infraGeneric("Pinus", SUPERSECTION, "Taeda") .code(NomCode.BOTANICAL) .nothingElse(); assertName("Aeonium nothosect. Leugalonium", "Aeonium nothosect. Leugalonium") .infraGeneric("Aeonium", SECTION, "Leugalonium") .notho(NamePart.INFRAGENERIC) .code(NomCode.BOTANICAL) .nothingElse(); assertName("Narcissus nothoser. Dubizettae", "Narcissus nothoser. Dubizettae") .infraGeneric("Narcissus", SERIES, "Dubizettae") .notho(NamePart.INFRAGENERIC) .code(NomCode.BOTANICAL) .nothingElse(); assertName("Serapias nothosubsect. Pladiopetalae", "Serapias nothosubsect. Pladiopetalae") .infraGeneric("Serapias", SUBSECTION, "Pladiopetalae") .notho(NamePart.INFRAGENERIC) .code(NomCode.BOTANICAL) .nothingElse(); assertName("Rubus nothosubgen. Cylarubus", "Rubus nothosubgen. Cylarubus") .infraGeneric("Rubus", SUBGENUS, "Cylarubus") .notho(NamePart.INFRAGENERIC) .nothingElse(); assertName("Arrhoges (Antarctohoges)", SUBGENUS, "Arrhoges subgen. Antarctohoges") .infraGeneric("Arrhoges", SUBGENUS, "Antarctohoges") .nothingElse(); assertName("Polygonum", Rank.SUBGENUS, "subgen. Polygonum") .infraGeneric(null, Rank.SUBGENUS, "Polygonum") .nothingElse(); assertName("subgen. Trematostoma Sacc.", "subgen. Trematostoma") .infraGeneric(null, SUBGENUS, "Trematostoma") .combAuthors(null, "Sacc.") .nothingElse(); assertName("Echinocereus sect. Triglochidiata Bravo", "Echinocereus sect. 
Triglochidiata") .infraGeneric("Echinocereus", SECTION, "Triglochidiata") .combAuthors(null, "Bravo") .code(BOTANICAL) .nothingElse(); assertName("Zignoella subgen. Trematostoma Sacc.", "Zignoella subgen. Trematostoma") .infraGeneric("Zignoella", SUBGENUS, "Trematostoma") .combAuthors(null, "Sacc.") .nothingElse(); assertName("Polygonum subgen. Bistorta (L.) Zernov", "Polygonum subgen. Bistorta") .infraGeneric("Polygonum", SUBGENUS, "Bistorta") .combAuthors(null, "Zernov") .basAuthors(null, "L.") .code(BOTANICAL) .nothingElse(); assertName("Arrhoges (Antarctohoges)", "Arrhoges") .monomial("Arrhoges") .basAuthors(null, "Antarctohoges") .code(NomCode.ZOOLOGICAL) .nothingElse(); assertName("Festuca subg. Schedonorus (P. Beauv. ) Peterm.", "Festuca subgen. Schedonorus") .infraGeneric("Festuca", SUBGENUS, "Schedonorus") .combAuthors(null, "Peterm.") .basAuthors(null, "P.Beauv.") .code(NomCode.BOTANICAL) .nothingElse(); assertName("Catapodium subg.Agropyropsis Trab.", "Catapodium subgen. Agropyropsis") .infraGeneric("Catapodium", SUBGENUS, "Agropyropsis") .combAuthors(null, "Trab.") .nothingElse(); assertName(" Gnaphalium subg. Laphangium Hilliard & B. L. Burtt", "Gnaphalium subgen. Laphangium") .infraGeneric("Gnaphalium", SUBGENUS, "Laphangium") .combAuthors(null, "Hilliard", "B.L.Burtt") .nothingElse(); assertName("Woodsiaceae (Hooker) Herter", "Woodsiaceae") .monomial("Woodsiaceae", FAMILY) .combAuthors(null, "Herter") .basAuthors(null, "Hooker") .code(NomCode.BOTANICAL) .nothingElse(); } @Test public void notNames() throws Exception { assertName("Diatrypella favacea var. favacea (Fr.) Ces. & De Not.", "Diatrypella favacea var. favacea") .infraSpecies("Diatrypella", "favacea", VARIETY, "favacea") .combAuthors(null, "Ces.", "De Not.") .basAuthors(null, "Fr.") .code(BOTANICAL) .nothingElse(); assertName("Protoventuria rosae (De Not.) Berl. & Sacc.", "Protoventuria rosae") .species("Protoventuria", "rosae") .combAuthors(null, "Berl.", "Sacc.") .basAuthors(null, "De Not.") .code(BOTANICAL) .nothingElse(); assertName("Hormospora De Not.", "Hormospora") .monomial("Hormospora") .combAuthors(null, "De Not.") .nothingElse(); } @Test public void unparsablePlaceholder() throws Exception { assertUnparsable("[unassigned] Cladobranchia", PLACEHOLDER); assertUnparsable("Biota incertae sedis", PLACEHOLDER); assertUnparsable("Mollusca not assigned", PLACEHOLDER); assertUnparsable("Unaccepted", PLACEHOLDER); assertUnparsable("uncultured Verrucomicrobiales bacterium", PLACEHOLDER); assertUnparsable("uncultured Vibrio sp.", PLACEHOLDER); assertUnparsable("uncultured virus", PLACEHOLDER); // ITIS placeholders: assertUnparsable("Temp dummy name", PLACEHOLDER); assertUnparsable("N.N.", PLACEHOLDER); assertUnparsable("N.N. (e.g., Breoghania)", PLACEHOLDER); assertUnparsable("N.N. (Chitinivorax)", PLACEHOLDER); assertUnparsable("N.n. (Chitinivorax)", PLACEHOLDER); assertUnparsable("Gen.nov. sp.nov.", NO_NAME); assertUnparsable("Gen.nov.", NO_NAME); } @Test public void placeholder() throws Exception { assertName("denheyeri Eghbalian, Khanjani and Ueckermann in Eghbalian, Khanjani & Ueckermann, 2017", "? denheyeri") .species("?", "denheyeri") .combAuthors("2017", "Eghbalian", "Khanjani", "Ueckermann") .type(PLACEHOLDER) .nomNote("in Eghbalian, Khanjani & Ueckermann") //hm, is this correct? maybe better not parse the in authorship year??? .code(NomCode.ZOOLOGICAL) .warning(Warnings.MISSING_GENUS) .nothingElse(); assertName("\"? gryphoidis", "? 
gryphoidis") .species("?", "gryphoidis") .type(PLACEHOLDER) .nothingElse(); assertName("\"? gryphoidis (Bourguignat 1870) Schoepf. 1909", "? gryphoidis") .species("?", "gryphoidis") .basAuthors("1870", "Bourguignat") .combAuthors("1909", "Schoepf.") .type(PLACEHOLDER) .code(ZOOLOGICAL) .nothingElse(); assertName("Missing penchinati Bourguignat, 1870", "? penchinati") .species("?", "penchinati") .combAuthors("1870", "Bourguignat") .type(PLACEHOLDER) .code(ZOOLOGICAL) .nothingElse(); } @Test public void sanctioned() throws Exception { // sanctioning authors not supported assertName("Boletus versicolor L. : Fr.", "Boletus versicolor") .species("Boletus", "versicolor") .combAuthors(null, "L.") .sanctAuthor("Fr.") .nothingElse(); assertName("Agaricus compactus sarcocephalus (Fr. : Fr.) Fr. ", "Agaricus compactus sarcocephalus") .infraSpecies("Agaricus", "compactus", INFRASPECIFIC_NAME, "sarcocephalus") .combAuthors(null, "Fr.") .basAuthors(null, "Fr.") .code(BOTANICAL) .nothingElse(); assertName("Agaricus compactus sarcocephalus (Fr. : Fr.) Fr. ", "Agaricus compactus sarcocephalus") .infraSpecies("Agaricus", "compactus", INFRASPECIFIC_NAME, "sarcocephalus") .combAuthors(null, "Fr.") .basAuthors(null, "Fr.") .code(BOTANICAL) .nothingElse(); } @Test public void nothotaxa() throws Exception { assertName("Iris germanica nothovar. florentina", "Iris germanica nothovar. florentina") .infraSpecies("Iris", "germanica", VARIETY, "florentina") .notho(INFRASPECIFIC) .nothingElse(); assertName("Abies alba var. ×alpina L.", "Abies alba nothovar. alpina") .infraSpecies("Abies", "alba", VARIETY, "alpina") .notho(INFRASPECIFIC) .combAuthors(null, "L.") .nothingElse(); } @Test public void aggregates() throws Exception { assertName("Achillea millefolium agg. L.", "Achillea millefolium") .binomial("Achillea", null, "millefolium", Rank.SPECIES_AGGREGATE) .combAuthors(null, "L.") .nothingElse(); assertName("Strumigenys koningsbergeri-group", "Strumigenys koningsbergeri") .binomial("Strumigenys", null, "koningsbergeri", Rank.SPECIES_AGGREGATE) .nothingElse(); assertName("Selenophorus parumpunctatus species group", "Selenophorus parumpunctatus") .binomial("Selenophorus", null, "parumpunctatus", Rank.SPECIES_AGGREGATE) .nothingElse(); assertName("Monomorium monomorium group", "Monomorium monomorium") .binomial("Monomorium", null, "monomorium", Rank.SPECIES_AGGREGATE) .nothingElse(); } /** * BOLD NCBI have lots of these aggregates. 
* As it can be on any level we do not want them to be parsed properly with rank=SpeciesAggregate */ @Test public void unparsablePlaceholders() throws Exception { assertUnparsable("Iteaphila-group", PLACEHOLDER); assertUnparsable("Bartonella group", PLACEHOLDER); } @Test public void rankExplicit() throws Exception { assertName("Achillea millefolium L.", Rank.SPECIES, "Achillea millefolium") .species("Achillea", "millefolium") .combAuthors(null, "L.") .nothingElse(); assertName("Achillea millefolium L.", Rank.SPECIES_AGGREGATE, "Achillea millefolium") .binomial("Achillea", null, "millefolium", Rank.SPECIES_AGGREGATE) .combAuthors(null, "L.") .nothingElse(); // higher ranks should be marked as doubtful for (Rank r : Rank.values()) { if (r.otherOrUnranked() || r.isSpeciesOrBelow()) continue; NameAssertion ass = assertName("Achillea millefolium L.", r, "Achillea millefolium") .binomial("Achillea", null, "millefolium", r) .combAuthors(null, "L.") .type(INFORMAL) .doubtful(); if (r.isRestrictedToCode() != null) { ass.code(r.isRestrictedToCode()); } ass.warning(Warnings.RANK_MISMATCH); ass.nothingElse(); } } @Test public void candidatus() throws Exception { assertName("\"Candidatus Endowatersipora\" Anderson and Haygood, 2007", "\"Candidatus Endowatersipora\"") .monomial("Endowatersipora") .candidatus() .combAuthors("2007", "Anderson", "Haygood") .nothingElse(); assertName("Candidatus Phytoplasma allocasuarinae", "\"Candidatus Phytoplasma allocasuarinae\"") .species("Phytoplasma", "allocasuarinae") .candidatus() .nothingElse(); assertName("Ca. Phytoplasma allocasuarinae", "\"Candidatus Phytoplasma allocasuarinae\"") .species("Phytoplasma", "allocasuarinae") .candidatus() .nothingElse(); assertName("Ca. Phytoplasma", "\"Candidatus Phytoplasma\"") .monomial("Phytoplasma") .candidatus() .nothingElse(); assertName("'Candidatus Nicolleia'", "\"Candidatus Nicolleia\"") .monomial("Nicolleia") .candidatus() .nothingElse(); assertName("\"Candidatus Riegeria\" Gruber-Vodicka et al., 2011", "\"Candidatus Riegeria\"") .monomial("Riegeria") .combAuthors("2011", "Gruber-Vodicka", "al.") .candidatus() .nothingElse(); assertName("Candidatus Endobugula", "\"Candidatus Endobugula\"") .monomial("Endobugula") .candidatus() .nothingElse(); // not candidate names assertName("Centropogon candidatus Lammers", "Centropogon candidatus") .species("Centropogon", "candidatus") .combAuthors(null, "Lammers") .nothingElse(); } @Test @Ignore public void strains() throws Exception { assertName("Endobugula sp. JYr4", "Endobugula sp. JYr4") .species("Endobugula", null) .strain("sp. JYr4") .nothingElse(); // avoid author & year to be accepted as strain assertName("Anniella nigra FISCHER 1885", "Anniella nigra") .species("Anniella", "nigra") .combAuthors("1885", "Fischer") .nothingElse(); } @Test public void norwegianRadiolaria() throws Exception { assertName("Actinomma leptodermum longispinum Cortese & Bjørklund 1998", "Actinomma leptodermum longispinum") .infraSpecies("Actinomma", "leptodermum", INFRASPECIFIC_NAME, "longispinum") .combAuthors("1998", "Cortese", "Bjørklund") .code(ZOOLOGICAL) .nothingElse(); assertName("Arachnosphaera dichotoma Jørgensen, 1900", "Arachnosphaera dichotoma") .species("Arachnosphaera", "dichotoma") .combAuthors("1900", "Jørgensen") .code(ZOOLOGICAL) .nothingElse(); assertName("Hexaconthium pachydermum forma legitime Cortese & Bjørklund 1998", "Hexaconthium pachydermum f. 
legitime") .infraSpecies("Hexaconthium", "pachydermum", FORM, "legitime") .combAuthors("1998", "Cortese", "Bjørklund") .code(ZOOLOGICAL) .nothingElse(); assertName("Hexaconthium pachydermum form A Cortese & Bjørklund 1998", "Hexaconthium pachydermum f. A") .infraSpecies("Hexaconthium", "pachydermum", FORM, "A") .combAuthors("1998", "Cortese", "Bjørklund") .type(INFORMAL) .code(ZOOLOGICAL) .nothingElse(); assertName("Tripodiscium gephyristes (Hülseman, 1963) BJ&KR-Atsdatabanken", "Tripodiscium gephyristes") .species("Tripodiscium", "gephyristes") .basAuthors("1963", "Hülseman") .combAuthors(null, "BJ", "KR-Atsdatabanken") .code(ZOOLOGICAL) .nothingElse(); assertName("Protocystis xiphodon (Haeckel, 1887), Borgert, 1901", "Protocystis xiphodon") .species("Protocystis", "xiphodon") .basAuthors("1887", "Haeckel") .combAuthors("1901", "Borgert") .code(ZOOLOGICAL) .nothingElse(); assertName("Acrosphaera lappacea (Haeckel, 1887) Takahashi, 1991", "Acrosphaera lappacea") .species("Acrosphaera", "lappacea") .basAuthors("1887", "Haeckel") .combAuthors("1991", "Takahashi") .code(ZOOLOGICAL) .nothingElse(); } @Test public void cultivars() throws Exception { assertName("Abutilon 'Kentish Belle'", "Abutilon 'Kentish Belle'") .cultivar("Abutilon", "Kentish Belle") .nothingElse(); assertName("Acer campestre L. cv. 'nanum'", "Acer campestre 'nanum'") .cultivar("Acer", "campestre", "nanum") .combAuthors(null, "L.") .nothingElse(); assertName("Verpericola megasoma \"Dall\" Pils.", "Verpericola megasoma 'Dall'") .cultivar("Verpericola", "megasoma", "Dall") .combAuthors(null, "Pils.") .nothingElse(); assertName("Abutilon 'Kentish Belle'", "Abutilon 'Kentish Belle'") .cultivar("Abutilon", "Kentish Belle") .nothingElse(); assertName("Abutilon 'Nabob'", "Abutilon 'Nabob'") .cultivar("Abutilon", "Nabob") .nothingElse(); assertName("Sorbus americana Marshall cv. 'Belmonte'", "Sorbus americana 'Belmonte'") .cultivar("Sorbus", "americana", "Belmonte") .combAuthors(null, "Marshall") .nothingElse(); assertName("Sorbus hupehensis C.K.Schneid. cv. 'November pink'", "Sorbus hupehensis 'November pink'") .cultivar("Sorbus", "hupehensis", "November pink") .combAuthors(null, "C.K.Schneid.") .nothingElse(); assertName("Symphoricarpos albus (L.) S.F.Blake cv. 'Turesson'", "Symphoricarpos albus 'Turesson'") .cultivar("Symphoricarpos", "albus", CULTIVAR, "Turesson") .basAuthors(null, "L.") .combAuthors(null, "S.F.Blake") .nothingElse(); assertName("Symphoricarpos sp. cv. 'mother of pearl'", "Symphoricarpos 'mother of pearl'") .cultivar("Symphoricarpos", CULTIVAR, "mother of pearl") .nothingElse(); assertName("Primula Border Auricula Group", "Primula Border Auricula Group") .cultivar("Primula", CULTIVAR_GROUP, "Border Auricula") .nothingElse(); assertName("Rhododendron boothii Mishmiense Group", "Rhododendron boothii Mishmiense Group") .cultivar("Rhododendron", "boothii", CULTIVAR_GROUP, "Mishmiense") .nothingElse(); assertName("Paphiopedilum Sorel grex", "Paphiopedilum Sorel gx") .cultivar("Paphiopedilum", GREX, "Sorel") .nothingElse(); assertName("Cattleya Prince John gx", "Cattleya Prince John gx") .cultivar("Cattleya", GREX, "Prince John") .nothingElse(); } @Test public void hybridFormulas() throws Exception { assertName("Polypodium x vulgare nothosubsp. mantoniae (Rothm.) Schidlay", "Polypodium vulgare nothosubsp. 
mantoniae") .infraSpecies("Polypodium", "vulgare", SUBSPECIES, "mantoniae") .basAuthors(null, "Rothm.") .combAuthors(null, "Schidlay") .notho(INFRASPECIFIC) .code(NomCode.BOTANICAL) .nothingElse(); assertHybridFormula("Asplenium rhizophyllum DC. x ruta-muraria E.L. Braun 1939"); assertHybridFormula("Arthopyrenia hyalospora X Hydnellum scrobiculatum"); assertHybridFormula("Arthopyrenia hyalospora (Banker) D. Hall X Hydnellum scrobiculatum D.E. Stuntz"); assertHybridFormula("Arthopyrenia hyalospora × ? "); assertHybridFormula("Agrostis L. × Polypogon Desf. "); assertHybridFormula("Agrostis stolonifera L. × Polypogon monspeliensis (L.) Desf. "); assertHybridFormula("Asplenium rhizophyllum X A. ruta-muraria E.L. Braun 1939"); assertHybridFormula("Asplenium rhizophyllum DC. x ruta-muraria E.L. Braun 1939"); assertHybridFormula("Asplenium rhizophyllum x ruta-muraria"); assertHybridFormula("Salix aurita L. × S. caprea L."); assertHybridFormula("Mentha aquatica L. × M. arvensis L. × M. spicata L."); assertHybridFormula("Polypodium vulgare subsp. prionodes (Asch.) Rothm. × subsp. vulgare"); assertHybridFormula("Tilletia caries (Bjerk.) Tul. × T. foetida (Wallr.) Liro."); assertHybridFormula("Cirsium acaulon x arvense"); assertHybridFormula("Juncus effusus × inflexus"); assertHybridFormula("Symphytum caucasicum x uplandicum"); } protected void assertHybridFormula(String name) { assertUnparsable(name, HYBRID_FORMULA); } @Test public void oTU() throws Exception { assertName("SH1508347.08FU", "SH1508347.08FU") .monomial("SH1508347.08FU") .type(OTU) .nothingElse(); assertName("SH19186714.17FU", "SH19186714.17FU") .monomial("SH19186714.17FU") .type(OTU) .nothingElse(); assertName("SH191814.08FU", "SH191814.08FU") .monomial("SH191814.08FU") .type(OTU) .nothingElse(); assertName("SH191814.04FU", "SH191814.04FU") .monomial("SH191814.04FU") .type(OTU) .nothingElse(); assertName("BOLD:ACW2100", "BOLD:ACW2100") .monomial("BOLD:ACW2100") .type(OTU) .nothingElse(); assertName("SH460441.07FU", "SH460441.07FU") .monomial("SH460441.07FU") .type(OTU) .nothingElse(); assertName("sh460441.07fu", "SH460441.07FU") .monomial("SH460441.07FU") .type(OTU) .nothingElse(); assertName("Festuca sp. BOLD:ACW2100", "BOLD:ACW2100") .monomial("BOLD:ACW2100") .type(OTU) .nothingElse(); // no OTU names assertName("Boldenaria", "Boldenaria") .monomial("Boldenaria") .nothingElse(); assertName("Boldea", "Boldea") .monomial("Boldea") .nothingElse(); assertName("Boldiaceae", "Boldiaceae") .monomial("Boldiaceae", Rank.FAMILY) .nothingElse(); assertName("Boldea vulgaris", "Boldea vulgaris") .species("Boldea", "vulgaris") .nothingElse(); } @Test public void strainNames() throws Exception { assertName("Candidatus Liberibacter solanacearum", "\"Candidatus Liberibacter solanacearum\"") .species("Liberibacter", "solanacearum") .candidatus() .nothingElse(); //assertName("Methylocystis sp. M6", "Methylocystis sp. M6") // .species("Liberibacter", "solanacearum") // .candidatus() // .nothingElse(); //assertStrain("", NameType.INFORMAL, "Methylocystis", null, null, Rank.SPECIES, "M6"); //assertStrain("Advenella kashmirensis W13003", NameType.INFORMAL, "Advenella", "kashmirensis", null, null, "W13003"); //assertStrain("Garra cf. dampaensis M23", NameType.INFORMAL, "Garra", "dampaensis", null, null, "M23"); //assertStrain("Sphingobium lucknowense F2", NameType.INFORMAL, "Sphingobium", "lucknowense", null, null, "F2"); //assertStrain("Pseudomonas syringae pv. 
atrofaciens LMG 5095T", NameType.INFORMAL, "Pseudomonas", "syringae", "atrofaciens", Rank.PATHOVAR, "LMG 5095T"); } @Test public void hybridAlikeNames() throws Exception { assertName("Huaiyuanella Xing, Yan & Yin, 1984", "Huaiyuanella") .monomial("Huaiyuanella") .combAuthors("1984", "Xing", "Yan", "Yin") .code(ZOOLOGICAL) .nothingElse(); assertName("Caveasphaera Xiao & Knoll, 2000", "Caveasphaera") .monomial("Caveasphaera") .combAuthors("2000", "Xiao", "Knoll") .code(ZOOLOGICAL) .nothingElse(); } @Test @Ignore("Need to evaluate and implement these alpha/beta/gamme/theta names. Comes from cladistics?") public void alphaBetaThetaNames() { } @Test public void hybridNames() throws Exception { assertName("+ Pyrocrataegus willei L.L.Daniel", "× Pyrocrataegus willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.GENERIC) .nothingElse(); assertName("×Pyrocrataegus willei L.L. Daniel", "× Pyrocrataegus willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.GENERIC) .nothingElse(); assertName(" × Pyrocrataegus willei L. L. Daniel", "× Pyrocrataegus willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.GENERIC) .nothingElse(); assertName(" X Pyrocrataegus willei L. L. Daniel", "× Pyrocrataegus willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.GENERIC) .nothingElse(); assertName("Pyrocrataegus ×willei L. L. Daniel", "Pyrocrataegus × willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.SPECIFIC) .nothingElse(); assertName("Pyrocrataegus × willei L. L. Daniel", "Pyrocrataegus × willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.SPECIFIC) .nothingElse(); assertName("Pyrocrataegus x willei L. L. Daniel", "Pyrocrataegus × willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.SPECIFIC) .nothingElse(); assertName("Pyrocrataegus X willei L. L. Daniel", "Pyrocrataegus × willei") .species("Pyrocrataegus", "willei") .combAuthors(null, "L.L.Daniel") .notho(NamePart.SPECIFIC) .nothingElse(); assertName("Pyrocrataegus willei ×libidi L.L.Daniel", "Pyrocrataegus willei × libidi") .infraSpecies("Pyrocrataegus", "willei", INFRASPECIFIC_NAME, "libidi") .combAuthors(null, "L.L.Daniel") .notho(INFRASPECIFIC) .nothingElse(); assertName("Pyrocrataegus willei nothosubsp. libidi L.L.Daniel", "Pyrocrataegus willei nothosubsp. libidi") .infraSpecies("Pyrocrataegus", "willei", SUBSPECIES, "libidi") .combAuthors(null, "L.L.Daniel") .notho(INFRASPECIFIC) .nothingElse(); assertName("+ Pyrocrataegus willei nothosubsp. libidi L.L.Daniel", "Pyrocrataegus willei nothosubsp. libidi") .infraSpecies("Pyrocrataegus", "willei", SUBSPECIES, "libidi") .combAuthors(null, "L.L.Daniel") .notho(INFRASPECIFIC) .nothingElse(); //TODO: impossible name. should this not be a generic hybrid as its the highest rank crossed? 
assertName("×Pyrocrataegus ×willei ×libidi L.L.Daniel", "Pyrocrataegus willei × libidi") .infraSpecies("Pyrocrataegus", "willei", INFRASPECIFIC_NAME, "libidi") .combAuthors(null, "L.L.Daniel") .notho(INFRASPECIFIC) .nothingElse(); } @Test public void authorVariations() throws Exception { // van der assertName("Megistocera tenuis (van der Wulp, 1885)", "Megistocera tenuis") .species("Megistocera", "tenuis") .basAuthors("1885", "van der Wulp") .code(ZOOLOGICAL) .nothingElse(); // turkish chars assertName("Stachys marashica Ilçim, Çenet & Dadandi", "Stachys marashica") .species("Stachys", "marashica") .combAuthors(null, "Ilçim", "Çenet", "Dadandi") .nothingElse(); assertName("Viola bocquetiana S. Yildirimli", "Viola bocquetiana") .species("Viola", "bocquetiana") .combAuthors(null, "S.Yildirimli") .nothingElse(); assertName("Anatolidamnicola gloeri gloeri Şahin, Koca & Yildirim, 2012", "Anatolidamnicola gloeri gloeri") .infraSpecies("Anatolidamnicola", "gloeri", Rank.INFRASPECIFIC_NAME, "gloeri") .combAuthors("2012", "Şahin", "Koca", "Yildirim") .code(ZOOLOGICAL) .nothingElse(); assertName("Modiola caroliniana L.f", "Modiola caroliniana") .species("Modiola", "caroliniana") .combAuthors(null, "L.f") .nothingElse(); assertName("Modiola caroliniana (L.) G. Don filius", "Modiola caroliniana") .species("Modiola", "caroliniana") .basAuthors(null, "L.") .combAuthors(null, "G.Don filius") .code(NomCode.BOTANICAL) .nothingElse(); assertName("Modiola caroliniana (L.) G. Don fil.", "Modiola caroliniana") .species("Modiola", "caroliniana") .basAuthors(null, "L.") .combAuthors(null, "G.Don fil.") .code(NomCode.BOTANICAL) .nothingElse(); assertName("Cirsium creticum d'Urv.", "Cirsium creticum") .species("Cirsium", "creticum") .combAuthors(null, "d'Urv.") .nothingElse(); // TODO: autonym authors are the species authors !!! assertName("Cirsium creticum d'Urv. subsp. creticum", "Cirsium creticum subsp. creticum") .infraSpecies("Cirsium", "creticum", SUBSPECIES, "creticum") //.combAuthors(null, "d'Urv.") .autonym() .code(NomCode.BOTANICAL) .nothingElse(); assertName("Cirsium creticum Balsamo M Fregni E Tongiorgi P", "Cirsium creticum") .species("Cirsium", "creticum") .combAuthors(null, "M.Balsamo", "E.Fregni", "P.Tongiorgi") .nothingElse(); assertName("Cirsium creticum Balsamo M Todaro MA", "Cirsium creticum") .species("Cirsium", "creticum") .combAuthors(null, "M.Balsamo", "M.A.Todaro") .nothingElse(); assertName("Bolivina albatrossi Cushman Em. Sellier de Civrieux, 1976", "Bolivina albatrossi") .species("Bolivina", "albatrossi") .combAuthors("1976", "Cushman Em.Sellier de Civrieux") .code(ZOOLOGICAL) .nothingElse(); assertName("Cribbia pendula la Croix & P.J.Cribb", "Cribbia pendula") .species("Cribbia", "pendula") .combAuthors(null, "la Croix", "P.J.Cribb") .nothingElse(); assertName("Cribbia pendula le Croix & P.J.Cribb", "Cribbia pendula") .species("Cribbia", "pendula") .combAuthors(null, "le Croix", "P.J.Cribb") .nothingElse(); assertName("Cribbia pendula de la Croix & le P.J.Cribb", "Cribbia pendula") .species("Cribbia", "pendula") .combAuthors(null, "de la Croix", "le P.J.Cribb") .nothingElse(); assertName("Cribbia pendula Croix & de le P.J.Cribb", "Cribbia pendula") .species("Cribbia", "pendula") .combAuthors(null, "Croix", "de le P.J.Cribb") .nothingElse(); assertName("Navicula ambigua f. craticularis Istv?nffi, 1898, 1897", "Navicula ambigua f. 
craticularis") .infraSpecies("Navicula", "ambigua", Rank.FORM, "craticularis") .combAuthors("1898", "Istvnffi") .doubtful() .code(ZOOLOGICAL) .warning(Warnings.QUESTION_MARKS_REMOVED) .nothingElse(); assertName("Cestodiscus gemmifer F.S.Castracane degli Antelminelli", "Cestodiscus gemmifer") .species("Cestodiscus", "gemmifer") .combAuthors(null, "F.S.Castracane degli Antelminelli") .nothingElse(); assertName("Hieracium scorzoneraefolium De la Soie", "Hieracium scorzoneraefolium") .species("Hieracium", "scorzoneraefolium") .combAuthors(null, "De la Soie") .nothingElse(); assertName("Calycostylis aurantiaca Hort. ex Vilmorin", "Calycostylis aurantiaca") .species("Calycostylis", "aurantiaca") .combAuthors(null, "Vilmorin") .combExAuthors("hort.") .nothingElse(); assertName("Pourretia magnispatha hortusa ex K. Koch", "Pourretia magnispatha") .species("Pourretia", "magnispatha") .combAuthors(null, "K.Koch") .combExAuthors("hort.") .nothingElse(); assertName("Pitcairnia pruinosa hortus ex K. Koch", "Pitcairnia pruinosa") .species("Pitcairnia", "pruinosa") .combAuthors(null, "K.Koch") .combExAuthors("hort.") .nothingElse(); assertName("Platycarpha glomerata (Thunberg) A.P.de Candolle", "Platycarpha glomerata") .species("Platycarpha", "glomerata") .basAuthors(null, "Thunberg") .combAuthors(null, "A.P.de Candolle") .code(NomCode.BOTANICAL) .nothingElse(); } @Test public void extinctNames() throws Exception { assertName("†Titanoptera", "Titanoptera") .monomial("Titanoptera") .nothingElse(); assertName("† Tuarangiida MacKinnon, 1982", "Tuarangiida") .monomial("Tuarangiida") .combAuthors("1982", "MacKinnon") .code(ZOOLOGICAL) .nothingElse(); } /** * Simply test all names in names-with-authors.txt and make sure they parse without exception * and have an authorship! * This test does not verify if the parsed name was correct in all its pieces, * so only use this as a quick way to add names to tests. * <p> * Exceptional cases should better be tested in a test on its own! */ @Test public void namesWithAuthorFile() throws Exception { for (String name : iterResource("names-with-authors.txt")) { ParsedName n = parser.parse(name, null); assertTrue(name, n.getState().isParsed()); assertTrue(name, n.hasAuthorship()); assertEquals(NameType.SCIENTIFIC, n.getType()); } } /** * Test all names in doubtful.txt and make sure they parse without exception, * but have a doubtful flag set. * This test does not verify if the parsed name was correct in all its pieces, * so only use this as a quick way to add names to tests. * <p> * Exceptional cases should better be tested in a test on its own! */ @Test public void doubtfulFile() throws Exception { for (String name : iterResource("doubtful.txt")) { ParsedName n = parser.parse(name, null); assertTrue(name, n.isDoubtful()); assertTrue(name, n.getState().isParsed()); assertTrue(name, n.getType().isParsable()); } } /** * Test all names in unparsable.txt and makes sure they are not parsable. */ @Test public void unparsableFile() throws Exception { for (String name : iterResource("unparsable.txt")) { try { parser.parse(name); fail("Expected " + name + " to be unparsable"); } catch (UnparsableNameException ex) { assertEquals(name, ex.getName()); } } } /** * Test all names in nonames.txt and makes sure they are NO_NAMEs. 
*/ @Test public void nonamesFile() throws Exception { for (String name : iterResource("nonames.txt")) { try { ParsedName pn = parser.parse(name); fail("Expected " + name + " to be unparsable"); } catch (UnparsableNameException ex) { assertEquals("Bad name type for: "+name, NameType.NO_NAME, ex.getType()); assertEquals(name, ex.getName()); } } } /** * Test all hybrid formulas in hybrids.txt and makes sure they are HYBRID_FORMULAs. */ @Test public void hybridsFile() throws Exception { for (String name : iterResource("hybrids.txt")) { try { ParsedName pn = parser.parse(name); fail("Expected " + name + " to be unparsable hybrid"); } catch (UnparsableNameException ex) { assertEquals(NameType.HYBRID_FORMULA, ex.getType()); assertEquals(name, ex.getName()); } } } /** * Test all names in nonames.txt and makes sure they are NO_NAMEs. */ @Test public void placeholderFile() throws Exception { for (String name : iterResource("placeholder.txt")) { try { ParsedName pn = parser.parse(name); fail("Expected " + name + " to be an unparsable placeholder"); } catch (UnparsableNameException ex) { assertEquals(NameType.PLACEHOLDER, ex.getType()); assertEquals(name, ex.getName()); } } } @Test public void occNameFile() throws Exception { int currFail = 4; int fails = parseFile("occurrence-names.txt"); if (fails > currFail) { fail("We are getting worse, not better. Currently failing: " + fails + ". Was passing:" + currFail); } } /** * Parse all verbatim GBIF checklist names to spot room for improvements */ @Test @Ignore public void gbifFile() throws Exception { parseFile("gbif-verbatim-names.txt"); } /** * @return number of failed names */ private int parseFile(String resourceName) throws Exception { int parseFails = 0; int counter = 0; long start = System.currentTimeMillis(); for (String name : iterResource(resourceName)) { counter++; if (counter % 100000 == 0) { long end = System.currentTimeMillis(); LOG.info("{} names tested, {} failed", counter, parseFails); LOG.info("Total time {}ms, average per name {}", (end - start), (((double) end - start) / counter)); } try { ParsedName pn = parser.parse(name); if (pn.getState() != ParsedName.State.COMPLETE) { LOG.debug("{} {}", pn.getState(), name); } } catch (UnparsableNameException ex) { if (ex.getType().isParsable() || ex.getType() == NO_NAME) { parseFails++; LOG.warn("{}: {}", ex.getType(), name); } } } long end = System.currentTimeMillis(); LOG.info("{} names tested, {} failed", counter, parseFails); LOG.info("Total time {}ms, average per name {}", (end - start), (((double) end - start) / counter)); return parseFails; } /** * Converts lines of a classpath resource that are not empty or are comments starting with # * into a simple string iterable */ private Iterable<String> iterResource(String resource) throws UnsupportedEncodingException { LineIterator iter = new LineIterator(resourceReader(resource)); return Iterables.filter(() -> iter, line -> line != null && !line.trim().isEmpty() && !line.startsWith(" ); } /** * Expect empty unparsable results for nothing or whitespace */ @Test public void empty() throws Exception { assertNoName(null); assertNoName(""); assertNoName(" "); assertNoName("\t"); assertNoName("\n"); assertNoName("\t\n"); assertNoName("\""); assertNoName("'"); } /** * Avoid nPEs and other exceptions for very short non names and other extremes found in occurrences. 
*/ @Test public void avoidNPE() throws Exception { assertNoName("\\"); assertNoName("."); assertNoName("@"); assertNoName("&nbsp;"); assertNoName("X"); assertNoName("a"); assertNoName("143"); assertNoName("321-432"); assertNoName("-,. assertNoName(" ."); } @Test public void informal() throws Exception { assertName("Trisulcus aff. nana (Popofsky, 1913), Petrushevskaya, 1971", "Trisulcus aff. nana") .species("Trisulcus", "nana") .basAuthors("1913", "Popofsky") .combAuthors("1971", "Petrushevskaya") .type(INFORMAL) .qualifiers(SPECIFIC, "aff.") .code(ZOOLOGICAL) .nothingElse(); assertName("Cerapachys mayeri cf. var. brachynodus", "Cerapachys mayeri cf. var. brachynodus") .infraSpecies("Cerapachys", "mayeri", VARIETY, "brachynodus") .type(INFORMAL) .qualifiers(INFRASPECIFIC, "cf.") .nothingElse(); assertName("Solenopsis cf fugax", "Solenopsis cf. fugax") .species("Solenopsis", "fugax") .type(INFORMAL) .qualifiers(SPECIFIC, "cf.") .nothingElse(); } @Test public void abbreviated() throws Exception { assertName("N. giraldo", "N. giraldo") .species("N.", "giraldo") .type(INFORMAL) .nothingElse(); assertName("B.", "B.") .monomial("B.") .type(INFORMAL) .nothingElse(); } @Test public void stringIndexOutOfBoundsException() throws Exception { parser.parse("Amblyomma americanum (Linnaeus, 1758)", null); parser.parse("Salix taiwanalpina var. chingshuishanensis (S.S.Ying) F.Y.Lu, C.H.Ou, Y.C.Chen, Y.S.Chi, K.C.Lu & Y.H.Tseng ", null); parser.parse("Salix taiwanalpina var. chingshuishanensis (S.S.Ying) F.Y.Lu, C.H.Ou, Y.C.Chen, Y.S.Chi, K.C.Lu & amp Y.H.Tseng ", null); parser.parse("Salix morrisonicola var. takasagoalpina (Koidz.) F.Y.Lu, C.H.Ou, Y.C.Chen, Y.S.Chi, K.C.Lu & amp; Y.H.Tseng", null); parser.parse("Ficus ernanii Carauta, Pederneir., P.P.Souza, A.F.P.Machado, M.D.M.Vianna & amp; Romaniuc", null); } @Test public void nomNotes() throws Exception { assertName("Anthurium lanceum Engl., nom. illeg., non. A. lancea.", "Anthurium lanceum") .species("Anthurium", "lanceum") .combAuthors(null, "Engl.") .nomNote("nom.illeg.") .code(BOTANICAL) .sensu("non. A.lancea.") .nothingElse(); //TODO: pro syn. assertName("Combretum Loefl. (1758), nom. cons. [= Grislea L. 1753].", "Combretum") .monomial("Combretum") .combAuthors("1758", "Loefl.") .nomNote("nom.cons.") .doubtful() .partial(")(= Grislea L.1753).") .code(NomCode.ZOOLOGICAL) .warning(Warnings.UNUSUAL_CHARACTERS) .nothingElse(); assertName("Anthurium lanceum Engl. nom.illeg.", "Anthurium lanceum") .species("Anthurium", "lanceum") .combAuthors(null, "Engl.") .nomNote("nom.illeg.") .code(BOTANICAL) .nothingElse(); } @Test public void taxonomicNotes() throws Exception { // from Dyntaxa assertName("Pycnophyes Auctt., non Zelinka, 1907", "Pycnophyes") .monomial("Pycnophyes") .sensu("auctt., non Zelinka, 1907") .nothingElse(); assertName("Dyadobacter (Chelius & Triplett, 2000) emend. Reddy & Garcia-Pichel, 2005", "Dyadobacter") .monomial("Dyadobacter") .basAuthors("2000", "Chelius", "Triplett") .sensu("emend. Reddy & Garcia-Pichel, 2005") .code(ZOOLOGICAL) .nothingElse(); assertName("Thalassiosira praeconvexa Burckle emend Gersonde & Schrader, 1984", "Thalassiosira praeconvexa") .species("Thalassiosira", "praeconvexa") .combAuthors(null, "Burckle") .sensu("emend Gersonde & Schrader, 1984") .nothingElse(); assertName("Amphora gracilis f. exilis Gutwinski according to Hollerback & Krasavina, 1971", "Amphora gracilis f. 
exilis") .infraSpecies("Amphora", "gracilis", Rank.FORM, "exilis") .combAuthors(null, "Gutwinski") .sensu("according to Hollerback & Krasavina, 1971") .nothingElse(); assertSensu("Trifolium repens sensu Baker f.", "sensu Baker f."); assertSensu("Achillea millefolium sensu latu", "sensu latu"); assertSensu("Achillea millefolium s.str.", "s.str."); assertSensu("Achillea millefolium sec. Greuter 2009", "sec. Greuter 2009"); assertSensu("Globularia cordifolia L. excl. var. (emend. Lam.)", "excl. var. (emend. Lam.)"); assertName("Ramaria subbotrytis (Coker) Corner 1950 ss. auct. europ.", "Ramaria subbotrytis") .species("Ramaria", "subbotrytis") .basAuthors(null, "Coker") .combAuthors("1950", "Corner") .sensu("ss. auct. europ.") .code(ZOOLOGICAL) .nothingElse(); assertName("Thelephora cuticularis Berk. ss. auct. europ.", "Thelephora cuticularis") .species("Thelephora", "cuticularis") .combAuthors(null, "Berk.") .sensu("ss. auct. europ.") .nothingElse(); assertName("Handmannia austriaca f. elliptica Handmann fide Hustedt, 1922", "Handmannia austriaca f. elliptica") .infraSpecies("Handmannia", "austriaca", Rank.FORM, "elliptica") .combAuthors(null, "Handmann") .sensu("fide Hustedt, 1922") .nothingElse(); } @Test public void nonNames() throws Exception { // the entire name ends up as a taxonomic note, consider this as unparsed... assertUnparsable("non Ramaria fagetorum Maas Geesteranus 1976 nomen nudum = Ramaria subbotrytis sensu auct. europ.", Rank.SPECIES, NameType.NO_NAME); assertName("Hebeloma album Peck 1900 non ss. auct. europ.", "Hebeloma album") .species("Hebeloma", "album") .combAuthors("1900", "Peck") .sensu("non ss. auct. europ.") .code(NomCode.ZOOLOGICAL) .nothingElse(); assertName("Nitocris (Nitocris) similis Breuning, 1956 (nec Gahan, 1893)", "Nitocris similis") .binomial("Nitocris", "Nitocris", "similis", Rank.SPECIES) .combAuthors("1956", "Breuning") .sensu("nec Gahan, 1893") .code(NomCode.ZOOLOGICAL) .nothingElse(); assertName("Bartlingia Brongn. non Rchb. 1824 nec F.Muell. 1882", "Bartlingia") .monomial("Bartlingia") .combAuthors(null, "Brongn.") .sensu("non Rchb. 1824 nec F.Muell. 1882") .nothingElse(); assertName("Lindera Thunb. non Adans. 1763", "Lindera") .monomial("Lindera") .combAuthors(null, "Thunb.") .sensu("non Adans. 1763") .nothingElse(); assertName("Chorististium maculatum (non Bloch 1790)", "Chorististium maculatum") .species("Chorististium", "maculatum") .sensu("non Bloch 1790") .nothingElse(); assertName("Puntius arulius subsp. tambraparniei (non Silas 1954)", "Puntius arulius subsp. tambraparniei") .infraSpecies("Puntius", "arulius", Rank.SUBSPECIES, "tambraparniei") .sensu("non Silas 1954") .code(NomCode.BOTANICAL) .nothingElse(); } @Test public void misapplied() throws Exception { assertName("Ficus exasperata auct. non Vahl", "Ficus exasperata") .species("Ficus", "exasperata") .sensu("auct. non Vahl") .nothingElse(); assertName("Mentha rotundifolia auct. non (L.) Huds. 1762", "Mentha rotundifolia") .species("Mentha", "rotundifolia") .sensu("auct. non (L.) Huds. 
1762") .nothingElse(); } private void assertSensu(String raw, String sensu) throws UnparsableNameException { assertEquals(sensu, parser.parse(raw, null).getTaxonomicNote()); } @Test public void viralNames() throws Exception { assertTrue(isViralName("Cactus virus 2")); assertTrue(isViralName("Vibrio phage 149 (type IV)")); assertTrue(isViralName("Cactus virus 2")); assertTrue(isViralName("Suid herpesvirus 3 Ictv")); assertTrue(isViralName("Tomato yellow leaf curl Mali virus Ictv")); assertTrue(isViralName("Not Sapovirus MC10")); assertTrue(isViralName("Diolcogaster facetosa bracovirus")); assertTrue(isViralName("Human papillomavirus")); assertTrue(isViralName("Sapovirus Hu/GI/Nsc, 150/PA/Bra/, 1993")); assertTrue(isViralName("Aspergillus mycovirus, 1816")); assertTrue(isViralName("Hantavirus sdp2 Yxl-, 2008")); assertTrue(isViralName("Norovirus Nizhny Novgorod /, 2461 / Rus /, 2007")); assertTrue(isViralName("Carrot carlavirus WM-, 2008")); assertTrue(isViralName("C2-like viruses")); assertTrue(isViralName("C1 bacteriophage")); assertTrue(isViralName("C-terminal Gfp fusion vector pUG23")); assertTrue(isViralName("C-terminal Gfp fusion vector")); assertTrue(isViralName("CMVd3 Flexi Vector pFN24K (HaloTag 7)")); assertTrue(isViralName("bacteriophage, 315.6")); assertTrue(isViralName("bacteriophages")); assertTrue(isViralName("\"T1-like viruses\"")); assertTrue(isViralName("Inachis io NPV")); assertTrue(isViralName("Hyloicus pinastri NPV")); assertTrue(isViralName("Dictyoploca japonica NPV")); assertTrue(isViralName("Apocheima pilosaria NPV")); assertTrue(isViralName("Lymantria xylina NPV")); assertTrue(isViralName("Feltia subterranea GV")); assertTrue(isViralName("Dionychopus amasis GV")); assertFalse(isViralName("Forcipomyia flavirustica Remm, 1968")); assertName("Crassatellites janus Hedley, 1906", "Crassatellites janus") .species("Crassatellites", "janus") .combAuthors("1906", "Hedley") .code(ZOOLOGICAL) .nothingElse(); assertName("Ypsolophus satellitella", "Ypsolophus satellitella") .species("Ypsolophus", "satellitella") .nothingElse(); assertName("Nephodia satellites", "Nephodia satellites") .species("Nephodia", "satellites") .nothingElse(); Reader reader = resourceReader("viruses.txt"); LineIterator iter = new LineIterator(reader); while (iter.hasNext()) { String line = iter.nextLine(); if (line == null || line.startsWith("#") || line.trim().isEmpty()) { continue; } assertTrue(isViralName(line)); } } @Test public void apostropheEpithets() throws Exception { assertName("Junellia o'donelli Moldenke, 1946", "Junellia o'donelli") .species("Junellia", "o'donelli") .combAuthors("1946", "Moldenke") .code(ZOOLOGICAL) .nothingElse(); assertName("Trophon d'orbignyi Carcelles, 1946", "Trophon d'orbignyi") .species("Trophon", "d'orbignyi") .combAuthors("1946", "Carcelles") .code(ZOOLOGICAL) .nothingElse(); assertName("Arca m'coyi Tenison-Woods, 1878", "Arca m'coyi") .species("Arca", "m'coyi") .combAuthors("1878", "Tenison-Woods") .code(ZOOLOGICAL) .nothingElse(); assertName("Nucula m'andrewii Hanley, 1860", "Nucula m'andrewii") .species("Nucula", "m'andrewii") .combAuthors("1860", "Hanley") .code(ZOOLOGICAL) .nothingElse(); assertName("Eristalis l'herminierii Macquart", "Eristalis l'herminierii") .species("Eristalis", "l'herminierii") .combAuthors(null, "Macquart") .nothingElse(); assertName("Odynerus o'neili Cameron", "Odynerus o'neili") .species("Odynerus", "o'neili") .combAuthors(null, "Cameron") .nothingElse(); assertName("Serjania meridionalis Cambess. var. o'donelli F.A. 
Barkley", "Serjania meridionalis var. o'donelli") .infraSpecies("Serjania", "meridionalis", Rank.VARIETY, "o'donelli") .combAuthors(null, "F.A.Barkley") .nothingElse(); } @Test public void initialsAfterSurname() throws Exception { assertName("Purana guttularis (Walker, F., 1858)", "Purana guttularis") .species("Purana", "guttularis") .basAuthors("1858", "F.Walker") .code(ZOOLOGICAL) .nothingElse(); assertName("Physomerinus septemfoveolatus Schaufuss, L. W.", "Physomerinus septemfoveolatus") .species("Physomerinus", "septemfoveolatus") .combAuthors(null, "L.W.Schaufuss") .nothingElse(); assertName("Physomerinus septemfoveolatus Schaufuss, L. W., 1877", "Physomerinus septemfoveolatus") .species("Physomerinus", "septemfoveolatus") .combAuthors("1877", "L.W.Schaufuss") .code(ZOOLOGICAL) .nothingElse(); assertName("Euplectus cavicollis LeConte, J. L., 1878", "Euplectus cavicollis") .species("Euplectus", "cavicollis") .combAuthors("1878", "J.L.LeConte") .code(ZOOLOGICAL) .nothingElse(); } @Test public void boldPlaceholder() throws Exception { assertName("OdontellidaeGEN", GENUS, "Odontellidae GEN") .monomial("Odontellidae", GENUS) .strain("GEN") .type(PLACEHOLDER) .nothingElse(); assertName("EusiridaeNZD", ZOOLOGICAL,"Eusiridae NZD") .monomial("Eusiridae", FAMILY) .strain("NZD") .type(PLACEHOLDER) .code(ZOOLOGICAL) .nothingElse(); assertName("Blattellinae_SB","Blattellinae SB") .monomial("Blattellinae") .strain("SB") .type(PLACEHOLDER) .nothingElse(); assertName("GenusANIC_3","Genus ANIC_3") .monomial("Genus") .strain("ANIC_3") .type(PLACEHOLDER) //.warning(Warnings.BLACKLISTED_EPITHET) .nothingElse(); } @Test public void nullNameParts() throws Exception { assertName("Austrorhynchus pectatus null pectatus", "Austrorhynchus pectatus pectatus") .infraSpecies("Austrorhynchus", "pectatus", Rank.INFRASPECIFIC_NAME, "pectatus") .doubtful() .warning(Warnings.NULL_EPITHET) .nothingElse(); //assertName("Poa pratensis null proles (L.) Rouy, 1913", "Poa pratensis proles") // .infraSpecies("Poa", "pratensis", Rank.PROLES, "proles") // .basAuthors(null, "L.") // .combAuthors("1913", "Rouy") // .nothingElse(); // should the infrasubspecific epithet kewensis be removed from the parsed name? //assertParsedParts("Poa pratensis kewensis proles", NameType.INFORMAL, "Poa", "pratensis", "kewensis", Rank.PROLES, null); //assertParsedParts("Poa pratensis kewensis proles (L.) 
Rouy, 1913", NameType.INFORMAL, "Poa", "pratensis", null, Rank.PROLES, "Rouy", "1913", "L.", null); } @Test @Ignore public void rNANames() throws Exception { assertName("Calathus (Lindrothius) KURNAKOV 1961", "Calathus (Lindrothius)") .infraGeneric("Calathus", Rank.INFRAGENERIC_NAME, "Lindrothius") .combAuthors("1961", "Kurnakov") .nothingElse(); assertTrue(isViralName("Ustilaginoidea virens RNA virus")); assertTrue(isViralName("Rhizoctonia solani dsRNA virus 2")); assertName("Candida albicans RNA_CTR0-3", "Candida albicans RNA_CTR0-3") .species("Candida", "albicans") .nothingElse(); //pn = parser.parse("Alpha proteobacterium RNA12", null); //assertEquals("Alpha", pn.getGenusOrAbove()); //assertEquals("proteobacterium", pn.getSpecificEpithet()); //assertEquals(NameType.INFORMAL, pn.getType()); //assertNull(pn.getInfraSpecificEpithet()); //assertNull(pn.getAuthorship()); //pn = parser.parse("Armillaria ostoyae RNA1", null); //assertEquals("Armillaria", pn.getGenusOrAbove()); //assertEquals("ostoyae", pn.getSpecificEpithet()); //assertEquals(NameType.INFORMAL, pn.getType()); //assertNull(pn.getInfraSpecificEpithet()); //assertNull(pn.getAuthorship()); //assertUnparsableType(NameType.DOUBTFUL, "siRNA"); } @Test public void indetNames() throws Exception { assertName("Nitzschia sinuata var. (Grunow) Lange-Bert.", "Nitzschia sinuata var.") .infraSpecies("Nitzschia", "sinuata", Rank.VARIETY, null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Canis lupus subsp. Linnaeus, 1758", "Canis lupus ssp.") .infraSpecies("Canis", "lupus", Rank.SUBSPECIES, null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); // assertName("Aphaenogaster (Ichnomyrmex) Schwammerdami var. spinipes", "Aphaenogaster var. spinipes") // .infraSpecies("Aphaenogaster", null, Rank.VARIETY, "spinipes") // .infraGeneric("Ichnomyrmex") // .type(NameType.INFORMAL) // .nothingElse(); // assertName("Ocymyrmex Weitzaeckeri subsp. arnoldi", "Ocymyrmex subsp. arnoldi") // .infraSpecies("Ocymyrmex", null, Rank.SUBSPECIES, "arnoldi") // .type(NameType.INFORMAL) // .nothingElse(); // assertName("Navicula var. fasciata", "Navicula var. 
fasciata") // .infraSpecies("Navicula", null, Rank.VARIETY, "fasciata") // .type(NameType.INFORMAL) // .nothingElse(); assertName("Polygonum spec.", "Polygonum sp.") .species("Polygonum", null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Polygonum vulgaris ssp.", "Polygonum vulgaris ssp.") .infraSpecies("Polygonum", "vulgaris", Rank.SUBSPECIES, null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Mesocricetus sp.", "Mesocricetus sp.") .species("Mesocricetus", null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); // dont treat these authorships as forms assertName("Dioscoreales Hooker f.", BOTANICAL, "Dioscoreales") .monomial("Dioscoreales", Rank.ORDER) .combAuthors(null, "Hooker f.") .code(BOTANICAL) .nothingElse(); assertName("Melastoma vacillans Blume var.", "Melastoma vacillans var.") .infraSpecies("Melastoma", "vacillans", Rank.VARIETY, null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Lepidoptera Hooker", Rank.SPECIES, "Lepidoptera sp.") .species("Lepidoptera", null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Lepidoptera alba DC.", Rank.SUBSPECIES, "Lepidoptera alba ssp.") .infraSpecies("Lepidoptera", "alba", Rank.SUBSPECIES, null) .type(NameType.INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); } @Test public void rankMismatch() throws Exception { assertName("Polygonum", Rank.CULTIVAR, "Polygonum cv.") .cultivar("Polygonum", null) .type(INFORMAL) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Polygonum", Rank.SUBSPECIES, "Polygonum subsp.") .indet("Polygonum", null, Rank.SUBSPECIES) .warning(Warnings.INDETERMINED) .nothingElse(); assertName("Polygonum alba", Rank.GENUS, "Polygonum alba") .binomial("Polygonum", null, "alba", Rank.GENUS) .type(INFORMAL) .doubtful() .warning(Warnings.RANK_MISMATCH) .nothingElse(); } @Test public void vulpes() throws Exception { assertName("Vulpes vulpes sp. silaceus Miller, 1907", "Vulpes vulpes silaceus") .infraSpecies("Vulpes", "vulpes", Rank.SUBSPECIES, "silaceus") .combAuthors("1907", "Miller") .warning(Warnings.SUBSPECIES_ASSIGNED) .code(ZOOLOGICAL) .nothingElse(); } @Test public void microbialRanks2() throws Exception { assertName("Puccinia graminis f. sp. avenae", "Puccinia graminis f.sp. avenae") .infraSpecies("Puccinia", "graminis", Rank.FORMA_SPECIALIS, "avenae") .code(NomCode.BACTERIAL) .nothingElse(); } @Test public void chineseAuthors() throws Exception { assertName("Abaxisotima acuminata (Wang & Liu, 1996)", "Abaxisotima acuminata") .species("Abaxisotima", "acuminata") .basAuthors("1996", "Wang", "Liu") .code(ZOOLOGICAL) .nothingElse(); assertName("Abaxisotima acuminata (Wang, Yuwen & Xian-wei Liu, 1996)", "Abaxisotima acuminata") .species("Abaxisotima", "acuminata") .basAuthors("1996", "Wang", "Yuwen", "Xian-wei Liu") .code(ZOOLOGICAL) .nothingElse(); assertName("Abaxisotima bicolor (Liu, Xian-wei, Z. Zheng & G. Xi, 1991)", "Abaxisotima bicolor") .species("Abaxisotima", "bicolor") .basAuthors("1991", "Liu", "Xian-wei", "Z.Zheng", "G.Xi") .code(ZOOLOGICAL) .nothingElse(); } @Test public void fungusNames() throws Exception { assertName("Merulius lacrimans (Wulfen : Fr.) Schum.", "Merulius lacrimans") .species("Merulius", "lacrimans") .combAuthors(null, "Schum.") .basAuthors(null, "Wulfen") .code(BOTANICAL) .nothingElse(); assertName("Merulius lacrimans (Wulfen) Schum. 
: Fr.", "Merulius lacrimans") .species("Merulius", "lacrimans") .combAuthors(null, "Schum.") .basAuthors(null, "Wulfen") .sanctAuthor("Fr.") .code(BOTANICAL) .nothingElse(); //assertParsedParts("", null, "Merulius", "lacrimans", null, null, "Schum.", null, "Wulfen : Fr.", null); //assertParsedParts("Aecidium berberidis Pers. ex J.F. Gmel.", null, "Aecidium", "berberidis", null, null, "Pers. ex J.F. Gmel.", null, null, null); //assertParsedParts("Mycosphaerella eryngii (Fr. Duby) ex Oudem., 1897", null, "Mycosphaerella", "eryngii", null, null, "ex Oudem.", "1897", "Fr. Duby", null); //assertParsedParts("Mycosphaerella eryngii (Fr.ex Duby) ex Oudem. 1897", null, "Mycosphaerella", "eryngii", null, null, "ex Oudem.", "1897", "Fr.ex Duby", null); //assertParsedParts("Mycosphaerella eryngii (Fr. ex Duby) Johanson ex Oudem. 1897", null, "Mycosphaerella", "eryngii", null, null, "Johanson ex Oudem.", "1897", "Fr. ex Duby", null); } @Test public void yearVariations() throws Exception { assertName("Deudorix epijarbas turbo Fruhstorfer, [1912]", "Deudorix epijarbas turbo") .infraSpecies("Deudorix", "epijarbas", Rank.INFRASPECIFIC_NAME, "turbo") .combAuthors("1912", "Fruhstorfer") .code(NomCode.ZOOLOGICAL) .nothingElse(); } @Test public void hyphens() throws Exception { assertName("Minilimosina v-atrum (Villeneuve, 1917)", "Minilimosina v-atrum") .species("Minilimosina", "v-atrum") .basAuthors("1917", "Villeneuve") .code(ZOOLOGICAL) .nothingElse(); assertName("Aelurillus v-insignitus", "Aelurillus v-insignitus") .species("Aelurillus", "v-insignitus") .nothingElse(); assertName("Desmometopa m-nigrum", "Desmometopa m-nigrum") .species("Desmometopa", "m-nigrum") .nothingElse(); assertName("Chloroclystis v-ata", "Chloroclystis v-ata") .species("Chloroclystis", "v-ata") .nothingElse(); assertName("Cortinarius moenne-loccozii Bidaud", "Cortinarius moenne-loccozii") .species("Cortinarius", "moenne-loccozii") .combAuthors(null, "Bidaud") .nothingElse(); assertName("Asarum sieboldii f. non-maculatum (Y.N.Lee) M.Kim", "Asarum sieboldii f. non-maculatum") .infraSpecies("Asarum", "sieboldii", FORM, "non-maculatum") .combAuthors(null, "M.Kim") .basAuthors(null, "Y.N.Lee") .code(BOTANICAL) .nothingElse(); } @Test public void imprintYears() throws Exception { assertName("Ophidocampa tapacumae Ehrenberg, 1870, 1869", "Ophidocampa tapacumae") .species("Ophidocampa", "tapacumae") .combAuthors("1870", "Ehrenberg") .code(ZOOLOGICAL) .nothingElse(); assertName("Brachyspira Hovind-Hougen, Birch-Andersen, Henrik-Nielsen, Orholm, Pedersen, Teglbjaerg & Thaysen, 1983, 1982", "Brachyspira") .monomial("Brachyspira") .combAuthors("1983", "Hovind-Hougen", "Birch-Andersen", "Henrik-Nielsen", "Orholm", "Pedersen", "Teglbjaerg", "Thaysen") .code(ZOOLOGICAL) .nothingElse(); assertName("Gyrosigma angulatum var. gamma Griffith & Henfrey, 1860, 1856", "Gyrosigma angulatum var. 
gamma") .infraSpecies("Gyrosigma", "angulatum", Rank.VARIETY, "gamma") .combAuthors("1860", "Griffith", "Henfrey") .code(ZOOLOGICAL) .nothingElse(); assertName("Ctenotus alacer Storr, 1970 [\"1969\"]", "Ctenotus alacer") .species("Ctenotus", "alacer") .combAuthors("1970", "Storr") .code(ZOOLOGICAL) .nothingElse(); assertName("Ctenotus alacer Storr, 1970 (imprint 1969)", "Ctenotus alacer") .species("Ctenotus", "alacer") .combAuthors("1970", "Storr") .code(ZOOLOGICAL) .nothingElse(); assertName("Ctenotus alacer Storr, 1887 (\"1886-1888\")", "Ctenotus alacer") .species("Ctenotus", "alacer") .combAuthors("1887", "Storr") .code(ZOOLOGICAL) .nothingElse(); assertName("Melanargia halimede menetriesi Wagener, 1959 & 1961", "Melanargia halimede menetriesi") .infraSpecies("Melanargia", "halimede", Rank.INFRASPECIFIC_NAME, "menetriesi") .combAuthors("1959", "Wagener") .code(ZOOLOGICAL) .nothingElse(); } @Test public void lowerCaseNames() throws Exception { assertName("abies alba Mill.", "Abies alba") .species("Abies", "alba") .combAuthors(null, "Mill.") .type(SCIENTIFIC) .nothingElse(); } @Test public void manuscriptNames() throws Exception { assertName("Abrodictyum caespifrons (C. Chr.) comb. ined.", "Abrodictyum caespifrons") .species("Abrodictyum", "caespifrons") .basAuthors(null, "C.Chr.") .type(SCIENTIFIC) .nomNote("comb.ined.") .manuscript() .nothingElse(); assertName("Acranthera virescens (Ridl.) ined.", "Acranthera virescens") .species("Acranthera", "virescens") .basAuthors(null, "Ridl.") .type(SCIENTIFIC) .nomNote("ined.") .manuscript() .nothingElse(); assertName("Micromeria cristata subsp. kosaninii ( ilic) ined.", "Micromeria cristata subsp. kosaninii") .infraSpecies("Micromeria", "cristata", SUBSPECIES, "kosaninii") //.basAuthors(null, "ilic") .partial("(ilic)") .type(SCIENTIFIC) .nomNote("ined.") .manuscript() .code(BOTANICAL) .nothingElse(); assertName("Lepidoptera sp. JGP0404", "Lepidoptera sp.JGP0404") .species("Lepidoptera", "sp.JGP0404") .type(INFORMAL) .manuscript() .nothingElse(); assertName("Genoplesium vernalis D.L.Jones ms.", "Genoplesium vernalis") .species("Genoplesium", "vernalis") .combAuthors(null, "D.L.Jones") .type(SCIENTIFIC) .manuscript() .nomNote("ms.") .nothingElse(); assertName("Verticordia sp.1", "Verticordia sp.1") .species("Verticordia", "sp.1") .type(INFORMAL) .manuscript() .nothingElse(); assertName("Bryozoan indet. 1", "Bryozoan indet.1") .species("Bryozoan", "indet.1") .type(INFORMAL) .manuscript() .nothingElse(); assertName("Bryozoan sp. E", "Bryozoan sp.E") .species("Bryozoan", "sp.E") .type(INFORMAL) .manuscript() .nothingElse(); assertName("Prostanthera sp. 
Somersbey (B.J.Conn 4024)", "Prostanthera sp.Somersbey(B.J.Conn 4024)") .species("Prostanthera", "sp.Somersbey(B.J.Conn 4024)") .type(INFORMAL) .manuscript() .nothingElse(); } @Test public void unsupportedAuthors() throws Exception { assertName(" Anolis marmoratus girafus LAZELL 1964: 377", "Anolis marmoratus girafus") .infraSpecies("Anolis", "marmoratus", Rank.INFRASPECIFIC_NAME, "girafus") .combAuthors("1964", "Lazell") .partial(":377") .code(ZOOLOGICAL) .nothingElse(); } // HELPER METHODS public boolean isViralName(String name) { try { parser.parse(name, null); } catch (UnparsableNameException e) { // swallow if (NameType.VIRUS == e.getType()) { return true; } } return false; } private void assertNoName(String name) { assertUnparsable(name, NO_NAME); } private void assertUnparsable(String name, NameType type) { assertUnparsableName(name, Rank.UNRANKED, type, name); } private void assertUnparsable(String name, Rank rank, NameType type) { assertUnparsableName(name, rank, type, name); } private void assertUnparsableName(String name, Rank rank, NameType type, String expectedName) { try { parser.parse(name, rank, null); fail("Expected " + name + " to be unparsable"); } catch (UnparsableNameException ex) { assertEquals(type, ex.getType()); assertEquals(expectedName, ex.getName()); } } NameAssertion assertName(String rawName, String expectedCanonicalWithoutAuthors) throws UnparsableNameException { return assertName(rawName, null, null, expectedCanonicalWithoutAuthors); } NameAssertion assertName(String rawName, Rank rank, String expectedCanonicalWithoutAuthors) throws UnparsableNameException { return assertName(rawName, rank, null, expectedCanonicalWithoutAuthors); } NameAssertion assertName(String rawName, NomCode code, String expectedCanonicalWithoutAuthors) throws UnparsableNameException { return assertName(rawName, null, code, expectedCanonicalWithoutAuthors); } NameAssertion assertName(String rawName, Rank rank, NomCode code, String expectedCanonicalWithoutAuthors) throws UnparsableNameException { ParsedName n = parser.parse(rawName, rank, code); assertEquals(expectedCanonicalWithoutAuthors, n.canonicalNameWithoutAuthorship()); return new NameAssertion(n); } private BufferedReader resourceReader(String resourceFileName) throws UnsupportedEncodingException { return new BufferedReader(new InputStreamReader(getClass().getResourceAsStream("/" + resourceFileName), "UTF-8")); } }
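// --- Hedged sketch, not part of the original tests above ---
// One more test in the same style; it would live inside the test class above so it can reuse
// the existing "parser" field. It relies only on ParsedName accessors already exercised by
// namesWithAuthorFile() and on a name whose canonical form appears elsewhere in this class;
// the example input is purely illustrative.
@Test
public void directParseSketch() throws Exception {
  ParsedName pn = parser.parse("Abies alba Mill.", null);
  assertTrue(pn.getState().isParsed());
  assertTrue(pn.hasAuthorship());
  assertEquals(NameType.SCIENTIFIC, pn.getType());
  assertEquals("Abies alba", pn.canonicalNameWithoutAuthorship());
}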
package com.magenta.guice.jpa;

import com.google.inject.AbstractModule;

/**
 * Guice module that installs the JPA support of this library by delegating all of its
 * bindings to {@link DBInterceptor#bind}.
 */
public class JPAModule extends AbstractModule {

    @Override
    protected void configure() {
        // DBInterceptor contributes every binding this module needs.
        DBInterceptor.bind(binder());
    }
}
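// --- Hedged usage sketch, not part of the original sources ---
// Shows how JPAModule might be installed into a Guice injector. Guice.createInjector is a
// standard Guice API; any application-specific modules (for example one providing the
// EntityManager that DBInterceptor presumably needs) are assumptions and only hinted at here.
package com.magenta.guice.jpa;

import com.google.inject.Guice;
import com.google.inject.Injector;

public class JPAModuleUsageSketch {
    public static void main(String[] args) {
        // Application modules would normally be passed alongside JPAModule.
        Injector injector = Guice.createInjector(new JPAModule());
        // injector.getInstance(...) now returns objects wired with the bindings
        // contributed by DBInterceptor.bind(..).
    }
}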
package io.hosuaby.restful.repositories;

import io.hosuaby.restful.domain.Teapot;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;

/**
 * Map-based repository for teapots. No exceptions are thrown at the repository
 * level. This repository is not thread safe. Thread safety must be assured at
 * the service layer.
 */
@Repository
public class TeapotRepository implements CrudRepository<Teapot, String> {

    /**
     * Map store for the teapots.
     */
    private Map<String, Teapot> teapotStore;

    /**
     * Default constructor.
     */
    public TeapotRepository() {
        teapotStore = new HashMap<>();
    }

    /** {@inheritDoc} */
    @Override
    public long count() {
        return teapotStore.size();
    }

    /** {@inheritDoc} */
    @Override
    public void delete(String id) {
        teapotStore.remove(id);
    }

    /** {@inheritDoc} */
    @Override
    public void delete(Teapot teapot) {
        teapotStore.remove(teapot.getId());
    }

    /** {@inheritDoc} */
    @Override
    public void delete(Iterable<? extends Teapot> teapots) {
        for (Teapot teapot : teapots) {
            teapotStore.remove(teapot.getId());
        }
    }

    /** {@inheritDoc} */
    @Override
    public void deleteAll() {
        teapotStore.clear();
    }

    /** {@inheritDoc} */
    @Override
    public boolean exists(String id) {
        return teapotStore.containsKey(id);
    }

    /** {@inheritDoc} */
    @Override
    public Iterable<Teapot> findAll() {
        return teapotStore.values();
    }

    /** {@inheritDoc} */
    @Override
    public Iterable<Teapot> findAll(Iterable<String> ids) {
        Set<Teapot> teapots = new HashSet<>();
        for (String id : ids) {
            Teapot teapot = teapotStore.get(id);
            if (teapot != null) {
                teapots.add(teapot);
            }
        }
        return teapots;
    }

    /** {@inheritDoc} */
    @Override
    public Teapot findOne(String id) {
        return teapotStore.get(id);
    }

    /** {@inheritDoc} */
    @Override
    public <S extends Teapot> S save(S teapot) {
        // Map.put returns the previous mapping (null for a new id); the CrudRepository
        // contract requires the saved entity to be returned instead.
        teapotStore.put(teapot.getId(), teapot);
        return teapot;
    }

    /** {@inheritDoc} */
    @Override
    public <S extends Teapot> Iterable<S> save(Iterable<S> teapots) {
        Set<S> savedTeapots = new HashSet<>();
        for (S teapot : teapots) {
            teapotStore.put(teapot.getId(), teapot);
            savedTeapots.add(teapot);
        }
        return savedTeapots;
    }
}
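// --- Hedged usage sketch, not part of the original sources ---
// Exercises only methods declared on the repository above. How a Teapot is constructed is not
// shown in the repository (only Teapot#getId is used there), so an instance is passed in.
package io.hosuaby.restful.repositories;

import io.hosuaby.restful.domain.Teapot;

public class TeapotRepositoryDemo {

    public static void demo(Teapot teapot) {
        TeapotRepository repository = new TeapotRepository();
        Teapot saved = repository.save(teapot);            // returns the saved entity
        boolean present = repository.exists(saved.getId());
        long total = repository.count();
        System.out.println("present=" + present + ", total=" + total);
        repository.delete(saved.getId());
    }
}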
package water.init; import water.Iced; import water.util.Log; import java.io.*; import java.net.URI; import java.util.ArrayList; import java.util.regex.Pattern; public class NodePersistentStorage { String NPS_DIR; public static class NodePersistentStorageEntry extends Iced { public String _category; public String _name; public long _size; public long _timestamp_millis; } public static void copyStream(InputStream is, OutputStream os) { final int buffer_size=1024; try { byte[] bytes=new byte[buffer_size]; for(;;) { int count=is.read(bytes, 0, buffer_size); if(count==-1) break; os.write(bytes, 0, count); } } catch(Exception ex) { throw new RuntimeException(ex); } } public NodePersistentStorage(URI npsDirParentURI) { NPS_DIR = npsDirParentURI.toString() + File.separator + "h2onps"; } private void validateCategoryName(String categoryName) { if (categoryName == null) { throw new IllegalArgumentException("NodePersistentStorage category not specified"); } if (! Pattern.matches("[\\-a-zA-Z0-9]+", categoryName)) { throw new IllegalArgumentException("NodePersistentStorage illegal category"); } } private void validateKeyName(String keyName) { if (keyName == null) { throw new IllegalArgumentException("NodePersistentStorage name not specified"); } if (! Pattern.matches("[\\-a-zA-Z0-9]+", keyName)) { throw new IllegalArgumentException("NodePersistentStorage illegal name"); } } public void put(String categoryName, String keyName, InputStream is) { Log.info("NPS put content category(" + categoryName + ") keyName(" + keyName + ")"); // Error checking validateCategoryName(categoryName); validateKeyName(keyName); // Create common directories File d = new File(NPS_DIR); if (! d.exists()) { boolean success = d.mkdir(); if (! success) { throw new RuntimeException("Could not make NodePersistentStorage directory (" + d + ")"); } } if (! d.exists()) { throw new RuntimeException("NodePersistentStorage directory does not exist (" + d + ")"); } File tmpd = new File(d + File.separator + "_tmp"); if (! tmpd.exists()) { boolean success = tmpd.mkdir(); if (! success) { throw new RuntimeException("Could not make NodePersistentStorage category directory (" + tmpd + ")"); } } if (! tmpd.exists()) { throw new RuntimeException("NodePersistentStorage category directory does not exist (" + tmpd + ")"); } // Create category directory File d2 = new File(d + File.separator + categoryName); if (! d2.exists()) { boolean success = d2.mkdir(); if (! success) { throw new RuntimeException("Could not make NodePersistentStorage category directory (" + d2 + ")"); } } if (! d2.exists()) { throw new RuntimeException("NodePersistentStorage category directory does not exist (" + d2 + ")"); } // Create tmp file File tmpf = new File(tmpd + File.separator + keyName); FileOutputStream fos = null; try { fos = new FileOutputStream(tmpf); copyStream(is, fos); } catch (Exception e) { throw new RuntimeException(e); } finally { try { if (fos != null) { fos.close(); } } catch (Exception ignore) {} } // Move tmp file to final spot File realf = new File(d2 + File.separator + keyName); try { boolean success = tmpf.renameTo(realf); if (! success) { throw new RuntimeException("NodePersistentStorage move failed (" + tmpf + " -> " + realf + ")"); } if (! 
realf.exists()) { throw new RuntimeException("NodePersistentStorage file does not exist (" + realf + ")"); } } catch (Exception e) { throw new RuntimeException(e); } Log.info("Put succeeded"); } public void put(String categoryName, String keyName, String value) { validateCategoryName(categoryName); validateKeyName(keyName); InputStream is = new ByteArrayInputStream(value.getBytes()); put(categoryName, keyName, is); } public NodePersistentStorageEntry[] list(String categoryName) { validateCategoryName(categoryName); String dirName = NPS_DIR + File.separator + categoryName; File dir = new File(dirName); File[] files = dir.listFiles(); if (files == null) { return new NodePersistentStorageEntry[0]; } ArrayList<NodePersistentStorageEntry> arr = new ArrayList<>(); for (File f : files) { NodePersistentStorageEntry entry = new NodePersistentStorageEntry(); entry._category = categoryName; entry._name = f.getName(); entry._size = f.length(); entry._timestamp_millis = f.lastModified(); arr.add(entry); } return arr.toArray(new NodePersistentStorageEntry[arr.size()]); } public String get_as_string(String categoryName, String keyName) { validateCategoryName(categoryName); validateKeyName(keyName); try { String fileName = NPS_DIR + File.separator + categoryName + File.separator + keyName; BufferedReader reader = new BufferedReader(new FileReader(fileName)); String line; StringBuilder stringBuilder = new StringBuilder(); String lineseparator = "\n"; while ((line = reader.readLine()) != null) { stringBuilder.append(line); stringBuilder.append(lineseparator); } return stringBuilder.toString(); } catch (Exception e) { throw new RuntimeException(e); } } public void delete(String categoryName, String keyName) { validateCategoryName(categoryName); validateKeyName(keyName); String fileName = NPS_DIR + File.separator + categoryName + File.separator + keyName; File f = new File(fileName); if (! f.exists()) { return; } boolean success = f.delete(); if (! success) { throw new RuntimeException("NodePersistentStorage delete failed (" + fileName + ")"); } } }
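// --- Hedged usage sketch, not part of the original sources ---
// Round-trips a small value through NodePersistentStorage using only methods defined above.
// The URI is illustrative: its toString() is used directly as the parent directory, which must
// already exist because put(..) only creates single directory levels. Note that
// get_as_string(..) re-appends a newline per line read, so "hello" comes back as "hello\n".
package water.init;

import java.net.URI;

public class NodePersistentStorageDemo {

    public static void main(String[] args) {
        NodePersistentStorage nps = new NodePersistentStorage(URI.create("/tmp"));
        nps.put("models", "my-key", "hello");
        String value = nps.get_as_string("models", "my-key");   // "hello\n"
        System.out.println("value=" + value.trim());
        for (NodePersistentStorage.NodePersistentStorageEntry entry : nps.list("models")) {
            System.out.println(entry._name + " (" + entry._size + " bytes)");
        }
        nps.delete("models", "my-key");
    }
}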
package io.klerch.alexa.state.handler; import com.amazon.speech.speechlet.Session; import com.amazonaws.services.iot.AWSIot; import com.amazonaws.services.iot.AWSIotClient; import com.amazonaws.services.iot.model.*; import com.amazonaws.services.iotdata.AWSIotData; import com.amazonaws.services.iotdata.AWSIotDataClient; import com.amazonaws.services.iotdata.model.GetThingShadowRequest; import com.amazonaws.services.iotdata.model.GetThingShadowResult; import com.amazonaws.services.iotdata.model.UpdateThingShadowRequest; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import io.klerch.alexa.state.model.AlexaScope; import io.klerch.alexa.state.model.AlexaStateModel; import io.klerch.alexa.state.utils.AlexaStateException; import io.klerch.alexa.state.utils.EncryptUtils; import org.apache.log4j.Logger; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.List; import java.util.Optional; public class AWSIotStateHandler extends AlexaSessionStateHandler { private final Logger log = Logger.getLogger(AWSS3StateHandler.class); private final AWSIot awsClient; private final AWSIotData awsDataClient; private final String thingAttributeName = "name"; private final String thingAttributeUser = "amzn-user-id"; private final String thingAttributeApp = "amzn-app-id"; private List<String> thingsExisting = new ArrayList<>(); public AWSIotStateHandler(final Session session) { this(session, new AWSIotClient(), new AWSIotDataClient()); } public AWSIotStateHandler(final Session session, final AWSIot awsClient, final AWSIotData awsDataClient) { super(session); this.awsClient = awsClient; this.awsDataClient = awsDataClient; } /** * Returns the AWS connection client used by this handler to manage resources * in AWS IoT. * @return AWS connection client for AWS IoT */ public AWSIot getAwsClient() { return this.awsClient; } /** * Returns the AWS connection client used by this handler to store model states in * thing shadows of AWS IoT. * @return AWS data connection client for AWS IoT */ public AWSIotData getAwsDataClient() { return this.awsDataClient; } /** * {@inheritDoc} */ @Override public void writeModel(final AlexaStateModel model) throws AlexaStateException { // write to session super.writeModel(model); if (model.hasUserScopedField()) { publishState(model, AlexaScope.USER); } if (model.hasApplicationScopedField()) { publishState(model, AlexaScope.APPLICATION); } } /** * {@inheritDoc} */ @Override public <TModel extends AlexaStateModel> Optional<TModel> readModel(final Class<TModel> modelClass) throws AlexaStateException { return this.readModel(modelClass, null); } /** * {@inheritDoc} */ @Override public void removeModel(final AlexaStateModel model) throws AlexaStateException { super.removeModel(model); if (model.hasSessionScopedField() || model.hasUserScopedField()) { removeModelFromShadow(model, AlexaScope.USER); } if (model.hasApplicationScopedField()) { removeModelFromShadow(model, AlexaScope.APPLICATION); } log.debug(String.format("Removed state from AWS IoT shadow for '%1$s'.", model)); } /** * {@inheritDoc} */ @Override public <TModel extends AlexaStateModel> Optional<TModel> readModel(final Class<TModel> modelClass, final String id) throws AlexaStateException { // if there is nothing for this model in the session ... 
final Optional<TModel> modelSession = super.readModel(modelClass, id); // create new model with given id. for now we assume a model exists for this id. we find out by // reading file from the bucket in the following lines. only if this is true model will be written back to session final TModel model = modelSession.orElse(createModel(modelClass, id)); // we need to remember if there will be something from thing shadow to be written to the model // in order to write those values back to the session at the end of this method Boolean modelChanged = false; // and if there are user-scoped fields ... if (model.hasUserScopedField() && fromThingShadowToModel(model, AlexaScope.USER)) { modelChanged = true; } // and if there are app-scoped fields ... if (model.hasApplicationScopedField() && fromThingShadowToModel(model, AlexaScope.APPLICATION)) { modelChanged = true; } // so if model changed from within something out of the shadow we want this to be in the speechlet as well // this gives you access to user- and app-scoped attributes throughout a session without reading from S3 over and over again if (modelChanged) { super.writeModel(model); return Optional.of(model); } else { // if there was nothing received from IOT and there is nothing to return from session // then its not worth return the model. better indicate this model does not exist return modelSession.isPresent() ? Optional.of(model) : Optional.empty(); } } /** * Returns name of the thing whose shadow is updated by this handler. It depends on * the scope of the fields persisted in AWS IoT as APPLICATION-scoped fields go to a different * thing shadow than USER-scoped fields. * @param scope The scope this thing is dedicated to * @return Name of the thing for this scope * @throws AlexaStateException Any error regarding thing name generation */ public String getThingName(final AlexaScope scope) throws AlexaStateException { return AlexaScope.APPLICATION.includes(scope) ? getAppScopedThingName() : getUserScopedThingName(); } /** * The thing will be created in AWS IoT if not existing for this application (when scope * APPLICATION is given) or for this user in this application (when scope USER is given) * @param scope The scope this thing is dedicated to * @throws AlexaStateException Any error regarding thing creation or existence check */ public void createThingIfNotExisting(final AlexaScope scope) throws AlexaStateException { final String thingName = getThingName(scope); if (!doesThingExist(thingName)) { createThing(thingName, scope); } } /** * Returns if the thing dedicated to the scope given is existing in AWS IoT. * @param scope The scope this thing is dedicated to * @return True, if the thing dedicated to the scope given is existing in AWS IoT. 
* @throws AlexaStateException Any error regarding thing creation or existence check */ public boolean doesThingExist(final AlexaScope scope) throws AlexaStateException { final String thingName = getThingName(scope); return doesThingExist(thingName); } private void removeModelFromShadow(final AlexaStateModel model, final AlexaScope scope) throws AlexaStateException { final String nodeName = model.getAttributeKey(); final String thingName = getThingName(scope); final String thingState = getState(scope); try { final ObjectMapper mapper = new ObjectMapper(); final JsonNode root = mapper.readTree(thingState); if (!root.isMissingNode()) { final JsonNode desired = root.path("state").path("desired"); if (!desired.isMissingNode() && desired instanceof ObjectNode) { ((ObjectNode) desired).remove(nodeName); } } final String json = mapper.writeValueAsString(root); publishState(thingName, json); } catch (IOException e) { final String error = String.format("Could not extract model state of '%1$s' from thing shadow '%2$s'", model, thingName); log.error(error, e); throw AlexaStateException.create(error).withCause(e).withModel(model).build(); } } private boolean fromThingShadowToModel(final AlexaStateModel model, final AlexaScope scope) throws AlexaStateException { // read from item with scoped model final String thingName = getThingName(scope); final String thingState = getState(scope); final String nodeName = model.getAttributeKey(); try { final ObjectMapper mapper = new ObjectMapper(); final JsonNode node = mapper.readTree(thingState).path("state").path("reported").path(nodeName); if (!node.isMissingNode()) { final String json = mapper.writeValueAsString(node); return model.fromJSON(json, scope); } } catch (IOException e) { final String error = String.format("Could not extract model state of '%1$s' from thing shadow '%2$s'", model, thingName); log.error(error, e); throw AlexaStateException.create(error).withCause(e).withModel(model).build(); } return false; } /** * Returns the name of the thing which is used to store model state scoped * as USER * @return Thing name for user-wide model state * @throws AlexaStateException some exceptions may occure when encrypting the user-id */ public String getUserScopedThingName() throws AlexaStateException { // user-ids in Alexa are too long for thing names in AWS IOT. // use the SHA1-hash of the user-id final String userHash; try { userHash = EncryptUtils.encryptSha1(session.getUser().getUserId()); } catch (NoSuchAlgorithmException | UnsupportedEncodingException e) { final String error = "Could not encrypt user-id for generating the IOT thing-name"; log.error(error, e); throw AlexaStateException.create(error).withHandler(this).withCause(e).build(); } return getAppScopedThingName() + "-" + userHash; } /** * Returns the name of the thing which is used to store model state scoped * as APPLICATION * @return Thing name for application-wide model state */ public String getAppScopedThingName() { // thing names do not allow dots in it return session.getApplication().getApplicationId().replace(".", "-"); } private String getState(final AlexaScope scope) throws AlexaStateException { final String thingName = getThingName(scope); createThingIfNotExisting(scope); final GetThingShadowRequest awsRequest = new GetThingShadowRequest().withThingName(thingName); try { final GetThingShadowResult response = awsDataClient.getThingShadow(awsRequest); final ByteBuffer buffer = response.getPayload(); try { return (buffer != null && buffer.hasArray()) ? 
new String(buffer.array(), "UTF-8") : "{}"; } catch (UnsupportedEncodingException e) { final String error = String.format("Could not handle received contents of thing-shadow '%1$s'", thingName); log.error(error, e); throw AlexaStateException.create(error).withCause(e).withHandler(this).build(); } } // if a thing does not have a shadow this is a usual exception catch (com.amazonaws.services.iotdata.model.ResourceNotFoundException e) { log.info(e); // we are fine with a thing having no shadow what just means there's nothing to read out for the model // return an empty JSON to indicate nothing is in the thing shadow return "{}"; } } private void publishState(final AlexaStateModel model, final AlexaScope scope) throws AlexaStateException { final String thingName = getThingName(scope); createThingIfNotExisting(scope); final String payload = "{\"state\":{\"desired\":{\"" + model.getAttributeKey() + "\":" + model.toJSON(scope) + "}}}"; publishState(thingName, payload); } private void publishState(final String thingName, final String json) throws AlexaStateException { final ByteBuffer buffer; try { buffer = ByteBuffer.wrap(json.getBytes("UTF-8")); } catch (UnsupportedEncodingException e) { final String error = String.format("Could not prepare JSON for model state publication to thing shadow '%1$s'", thingName); log.error(error, e); throw AlexaStateException.create(error).withCause(e).withHandler(this).build(); } final UpdateThingShadowRequest iotRequest = new UpdateThingShadowRequest().withThingName(thingName).withPayload(buffer); awsDataClient.updateThingShadow(iotRequest); } private void createThing(final String thingName, final AlexaScope scope) { // only create thing if not already existing final AttributePayload attrPayload = new AttributePayload(); // add thing name as attribute as well. this is how the handler queries for the thing from now on attrPayload.addAttributesEntry(thingAttributeName, thingName); // if scope is user an attribute saves the plain user id as it is encrypted in the thing name if (AlexaScope.USER.includes(scope)) { attrPayload.addAttributesEntry(thingAttributeUser, session.getUser().getUserId()); } // another thing attributes holds the Alexa application-id attrPayload.addAttributesEntry(thingAttributeApp, session.getApplication().getApplicationId()); // now create the thing final CreateThingRequest request = new CreateThingRequest().withThingName(thingName).withAttributePayload(attrPayload); awsClient.createThing(request); } private boolean doesThingExist(final String thingName) { // if already checked existence than return immediately if (thingsExisting.contains(thingName)) return true; // query by an attribute having the name of the thing // unfortunately you can only query for things with their attributes, not directly with their names final ListThingsRequest request = new ListThingsRequest().withAttributeName(thingAttributeName).withAttributeValue(thingName).withMaxResults(1); final ListThingsResult result = awsClient.listThings(request); if(result != null && result.getThings() != null && result.getThings().isEmpty()) { thingsExisting.add(thingName); return true; } return false; } }
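// --- Hedged usage sketch, not part of the original sources ---
// "CounterModel" stands for any AlexaStateModel subclass and is purely hypothetical; the Session
// comes from the incoming Alexa request. Only readModel/writeModel from the handler above are
// used: readModel merges thing-shadow state into the session, writeModel publishes it back.
package io.klerch.alexa.state.handler;

import java.util.Optional;

import com.amazon.speech.speechlet.Session;

import io.klerch.alexa.state.utils.AlexaStateException;

public class AWSIotStateHandlerSketch {

    public static void rePublish(final Session session) throws AlexaStateException {
        final AWSIotStateHandler handler = new AWSIotStateHandler(session);
        // CounterModel is a hypothetical model class used only for illustration.
        final Optional<CounterModel> model = handler.readModel(CounterModel.class);
        if (model.isPresent()) {
            // Writes USER- and APPLICATION-scoped fields to the corresponding thing shadows.
            handler.writeModel(model.get());
        }
    }
}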
package io.opentracing.contrib.jdbi; import io.opentracing.Span; import io.opentracing.Tracer; import org.skife.jdbi.v2.SQLStatement; import org.skife.jdbi.v2.StatementContext; import org.skife.jdbi.v2.TimingCollector; /** * OpenTracingCollector is a JDBI TimingCollector that creates OpenTracing Spans for each JDBI SQLStatement. * * <p>Example usage: * <pre>{@code * io.opentracing.Tracer tracer = ...; * DBI dbi = ...; * * // One time only: bind OpenTracing to the DBI instance as a TimingCollector. * dbi.setTimingCollector(new OpenTracingCollector(tracer)); * * // Elsewhere, anywhere a `Handle` is available: * Handle handle = ...; * Span parentSpan = ...; // optional * * // Create statements as usual with your `handle` instance. * Query<Map<String, Object>> statement = handle.createQuery("SELECT COUNT(*) FROM accounts"); * * // If a parent Span is available, establish the relationship via setParent. * OpenTracingCollector.setParent(statement, parent); * * // Use JDBI as per usual, and Spans will be created for every SQLStatement automatically. * List<Map<String, Object>> results = statement.list(); * }</pre> */ public class OpenTracingCollector implements TimingCollector { public final static String PARENT_SPAN_ATTRIBUTE_KEY = "io.opentracing.parent"; private final Tracer tracer; private final SpanDecorator spanDecorator; private final ActiveSpanSource activeSpanSource; public OpenTracingCollector(Tracer tracer) { this(tracer, SpanDecorator.DEFAULT); } public OpenTracingCollector(Tracer tracer, SpanDecorator spanDecorator) { this(tracer, spanDecorator, null); } public OpenTracingCollector(Tracer tracer, ActiveSpanSource spanSource) { this(tracer, SpanDecorator.DEFAULT, spanSource); } public OpenTracingCollector(Tracer tracer, SpanDecorator operationNamer, ActiveSpanSource spanSource) { this.tracer = tracer; this.spanDecorator = operationNamer; this.activeSpanSource = spanSource; } public void collect(long elapsedNanos, StatementContext statementContext) { long nowMicros = System.currentTimeMillis() * 1000; Tracer.SpanBuilder builder = tracer .buildSpan(spanDecorator.generateOperationName(statementContext)) .withStartTimestamp(nowMicros - (elapsedNanos / 1000)); Span parent = (Span)statementContext.getAttribute(PARENT_SPAN_ATTRIBUTE_KEY); if (parent == null && this.activeSpanSource != null) { parent = this.activeSpanSource.activeSpan(statementContext); } if (parent != null) { builder = builder.asChildOf(parent); } Span collectSpan = builder.start(); spanDecorator.decorateSpan(collectSpan, elapsedNanos, statementContext); try { collectSpan.log("SQL query finished", statementContext.getRawSql()); } finally { collectSpan.finish(nowMicros); } } /** * Establish an explicit parent relationship for the (child) Span associated with a SQLStatement. * * @param statement the JDBI SQLStatement which will act as the child of `parent` * @param parent the parent Span for `statement` */ public static void setParent(SQLStatement<?> statement, Span parent) { statement.getContext().setAttribute(PARENT_SPAN_ATTRIBUTE_KEY, parent); } /** * SpanDecorator allows the OpenTracingCollector user to control the precise naming and decoration of OpenTracing * Spans emitted by the collector. 
* * @see OpenTracingCollector#OpenTracingCollector(Tracer, SpanDecorator) */ public interface SpanDecorator { public static SpanDecorator DEFAULT = new SpanDecorator() { public String generateOperationName(StatementContext ctx) { return "DBI Statement"; } @Override public void decorateSpan(Span jdbiSpan, long elapsedNanos, StatementContext ctx) { // (by default, do nothing) } }; /** * Transform an DBI StatementContext into an OpenTracing Span operation name. * * @param ctx the StatementContext passed to TimingCollector.collect() * @return an operation name suitable for the associated OpenTracing Span */ public String generateOperationName(StatementContext ctx); /** * Get the active Span (to use as a parent for any DBI Spans). Implementations may or may not need to refer * to the StatementContext. * * @param jdbiSpan the JDBI Span to decorate (before `finish` is called) * @param elapsedNanos the elapsedNanos passed to TimingCollector.collect() * @param ctx the StatementContext passed to TimingCollector.collect() */ public void decorateSpan(Span jdbiSpan, long elapsedNanos, StatementContext ctx); } /** * An abstract API that allows the OpenTracingCollector to customize how parent Spans are discovered. * * For instance, if Spans are stored in a thread-local variable, an ActiveSpanSource could access them like so: * <p>Example usage: * <pre>{@code * public class SomeClass { * // Thread local variable containing each thread's ID * private static final ThreadLocal<Span> activeSpan = * new ThreadLocal<Span>() { * protected Integer initialValue() { * return null; * } * }; * }; * * ... elsewhere ... * ActiveSpanSource spanSource = new ActiveSpanSource() { * public Span activeSpan(StatementContext ctx) { * // (In this example we ignore `ctx` entirely) * return activeSpan.get(); * } * }; * OpenTracingCollector otColl = new OpenTracingCollector(tracer, spanSource); * ... * }</pre> */ public interface ActiveSpanSource { /** * Get the active Span (to use as a parent for any DBI Spans). Implementations may or may not need to refer * to the StatementContext. * * @param ctx the StatementContext that needs to be collected and traced * @return the currently active Span (for this thread, etc), or null if no such Span could be found. */ public Span activeSpan(StatementContext ctx); } }
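// --- Hedged usage sketch, not part of the original sources ---
// Wires the collector into a DBI instance with a SpanDecorator that names spans after the raw
// SQL instead of the DEFAULT "DBI Statement". DBI#setTimingCollector appears in the class
// javadoc above and StatementContext#getRawSql is already used by collect(); the DBI and Tracer
// instances are supplied by the caller.
package io.opentracing.contrib.jdbi;

import io.opentracing.Span;
import io.opentracing.Tracer;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.StatementContext;

public class OpenTracingCollectorWiring {

    public static void install(DBI dbi, Tracer tracer) {
        OpenTracingCollector.SpanDecorator sqlNamer = new OpenTracingCollector.SpanDecorator() {
            public String generateOperationName(StatementContext ctx) {
                return ctx.getRawSql(); // use the statement's SQL text as the operation name
            }
            public void decorateSpan(Span span, long elapsedNanos, StatementContext ctx) {
                // no additional tags in this sketch
            }
        };
        dbi.setTimingCollector(new OpenTracingCollector(tracer, sqlNamer));
    }
}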
package florian_haas.lucas.database; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.criteria.*; import florian_haas.lucas.model.EntityBase; import florian_haas.lucas.util.QuadFunction; public interface ReadOnlyDAO<E extends EntityBase> { public EntityManager getEntityManager(); public Class<E> getEntityClass(); public List<E> findAll(); public E findById(Long id); public Boolean exists(Long id); public E refresh(E entity); public List<E> readOnlyJPQLQuery(String jpql, Object... params); public List<E> readOnlyCriteriaQuery( QuadFunction<CriteriaQuery<E>, Root<EntityBase>, Root<E>, CriteriaBuilder, Predicate[]> restrictions); }
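/*
 * Usage sketch (not part of the original sources): how a caller might use the criteria-based
 * query method of ReadOnlyDAO above. This assumes QuadFunction is a functional interface whose
 * single abstract method takes (CriteriaQuery<E>, Root<EntityBase>, Root<E>, CriteriaBuilder)
 * in that order and returns Predicate[]; the concrete DAO instance, entity class "User" and
 * attribute "name" are hypothetical.
 *
 *   ReadOnlyDAO<User> userDao = ...;
 *   List<User> smiths = userDao.readOnlyCriteriaQuery(
 *       (query, rootBase, root, cb) -> new Predicate[] {
 *           cb.equal(root.get("name"), "Smith")
 *       });
 */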
package be.ibridge.kettle.trans.step.socketwriter;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.net.ServerSocket;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import be.ibridge.kettle.core.Const;
import be.ibridge.kettle.core.Row;
import be.ibridge.kettle.core.exception.KettleException;
import be.ibridge.kettle.core.util.StringUtil;
import be.ibridge.kettle.trans.Trans;
import be.ibridge.kettle.trans.TransMeta;
import be.ibridge.kettle.trans.step.BaseStep;
import be.ibridge.kettle.trans.step.StepDataInterface;
import be.ibridge.kettle.trans.step.StepInterface;
import be.ibridge.kettle.trans.step.StepMeta;
import be.ibridge.kettle.trans.step.StepMetaInterface;

/**
 * Accepts a connection on the configured port and writes all input rows to that socket,
 * optionally GZIP-compressed, so another transformation can read them remotely.
 *
 * @author Matt
 * @since 2-jun-2003
 */
public class SocketWriter extends BaseStep implements StepInterface {
    private SocketWriterMeta meta;
    private SocketWriterData data;

    public SocketWriter(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans) {
        super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
    }

    public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
        meta = (SocketWriterMeta) smi;
        data = (SocketWriterData) sdi;

        Row r = getRow(); // get row, set busy!
        if (r == null) // no more input to be expected...
        {
            setOutputDone();
            return false;
        }

        try {
            if (first) {
                int bufferSize = Const.toInt(StringUtil.environmentSubstitute(meta.getBufferSize()), 1000);

                data.clientSocket = data.serverSocket.accept();

                if (meta.isCompressed()) {
                    data.outputStream = new DataOutputStream(new BufferedOutputStream(new GZIPOutputStream(data.clientSocket.getOutputStream()), bufferSize));
                    data.inputStream = new DataInputStream(new BufferedInputStream(new GZIPInputStream(data.clientSocket.getInputStream()), bufferSize));
                } else {
                    data.outputStream = new DataOutputStream(new BufferedOutputStream(data.clientSocket.getOutputStream(), bufferSize));
                    data.inputStream = new DataInputStream(new BufferedInputStream(data.clientSocket.getInputStream(), bufferSize));
                }

                data.flushInterval = Const.toInt(StringUtil.environmentSubstitute(meta.getFlushInterval()), 4000);

                r.write(data.outputStream);
                first = false;
            }
            r.writeData(data.outputStream);
            linesOutput++;

            // flush every X rows
            if (linesOutput > 0 && data.flushInterval > 0 && (linesOutput % data.flushInterval) == 0)
                data.outputStream.flush();
        } catch (Exception e) {
            logError("Error writing to socket : " + e.toString());
            logError("Failing row : " + r);
            logError("Stack trace: " + Const.CR + Const.getStackTracker(e));

            setErrors(1);
            stopAll();
            setOutputDone();
            return false;
        }

        if (checkFeedback(linesRead))
            logBasic(Messages.getString("SocketWriter.Log.LineNumber") + linesRead); //$NON-NLS-1$

        return true;
    }

    public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
        meta = (SocketWriterMeta) smi;
        data = (SocketWriterData) sdi;

        if (super.init(smi, sdi)) {
            try {
                int port = Integer.parseInt(StringUtil.environmentSubstitute(meta.getPort()));
                data.serverSocket = new ServerSocket(port);

                // Add init code here.
                return true;
            } catch (Exception e) {
                logError("Error creating server socket: " + e.toString());
                logError(Const.getStackTracker(e));
            }
        }
        return false;
    }

    public void dispose(StepMetaInterface smi, StepDataInterface sdi) {
        // Ignore errors, we don't care
        // If we are here, it means all work is done
        // It's a lot of work to keep it all in sync, for now we don't need to do that.
        try { data.outputStream.close(); } catch (Exception e) {}
        try { data.inputStream.close(); } catch (Exception e) {}
        try { data.clientSocket.close(); } catch (Exception e) {}
        try { data.serverSocket.close(); } catch (Exception e) {}

        super.dispose(smi, sdi);
    }

    // Run is where the action happens!
    public void run() {
        try {
            logBasic(Messages.getString("SocketWriter.Log.StartingToRun")); //$NON-NLS-1$

            while (processRow(meta, data) && !isStopped())
                ;
        } catch (Exception e) {
            logError(Messages.getString("SocketWriter.Log.UnexpectedError") + " : " + e.toString()); //$NON-NLS-1$ //$NON-NLS-2$
            logError(Const.getStackTracker(e));
            setErrors(1);
            stopAll();
        } finally {
            dispose(meta, data);
            logSummary();
            markStop();
        }
    }
}
package io.vertx.ext.embeddedmongo; import de.flapdoodle.embed.mongo.Command; import de.flapdoodle.embed.mongo.MongodExecutable; import de.flapdoodle.embed.mongo.MongodStarter; import de.flapdoodle.embed.mongo.config.IMongodConfig; import de.flapdoodle.embed.mongo.config.MongodConfigBuilder; import de.flapdoodle.embed.mongo.config.Net; import de.flapdoodle.embed.mongo.config.RuntimeConfigBuilder; import de.flapdoodle.embed.mongo.distribution.Version; import de.flapdoodle.embed.process.config.IRuntimeConfig; import de.flapdoodle.embed.process.runtime.Network; import io.vertx.core.AbstractVerticle; import io.vertx.core.json.JsonObject; import io.vertx.core.logging.SLF4JLogDelegateFactory; import org.slf4j.Logger; public class EmbeddedMongoVerticle extends AbstractVerticle { private MongodExecutable exe; @Override public void start() throws Exception { if (vertx != null && !context.isWorkerContext()) { throw new IllegalStateException("Must be started as worker verticle!"); } JsonObject config = context.config(); int port = config.getInteger("port"); IMongodConfig embeddedConfig = new MongodConfigBuilder(). version(Version.Main.PRODUCTION). net(new Net(port, Network.localhostIsIPv6())). build(); Logger logger = (Logger) new SLF4JLogDelegateFactory() .createDelegate(EmbeddedMongoVerticle.class.getCanonicalName()).unwrap(); IRuntimeConfig runtimeConfig = new RuntimeConfigBuilder() .defaultsWithLogger(Command.MongoD, logger) .build(); exe = MongodStarter.getInstance(runtimeConfig).prepare(embeddedConfig); exe.start(); } @Override public void stop() { exe.stop(); } }
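/*
 * Deployment sketch (not part of the original sources): EmbeddedMongoVerticle checks that it runs
 * on a worker context and reads "port" from its config, so a caller has to deploy it as a worker
 * verticle with that key set. This assumes the Vert.x 3.x core API (Vertx.vertx(),
 * DeploymentOptions, JsonObject); the example class itself and the port value 27018 are arbitrary.
 */
package io.vertx.ext.embeddedmongo;

import io.vertx.core.DeploymentOptions;
import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;

public class EmbeddedMongoVerticleDeployExample {
    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();
        DeploymentOptions options = new DeploymentOptions()
                .setWorker(true) // start() throws IllegalStateException on a non-worker context
                .setConfig(new JsonObject().put("port", 27018)); // read via config.getInteger("port")
        vertx.deployVerticle(EmbeddedMongoVerticle.class.getName(), options, result -> {
            if (result.succeeded()) {
                System.out.println("Embedded MongoDB started, deployment id: " + result.result());
            } else {
                result.cause().printStackTrace();
            }
        });
    }
}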
package net.finkn.inputspec.v050; import net.finkn.inputspec.tools.Generator; import net.finkn.inputspec.tools.ParamCfg; import net.finkn.inputspec.tools.GenTestCase; import org.junit.Test; public class SimpleSingleRangeNextTest { // 100 should be more than enough for relatively small ranges. // A smaller number will speed up tests, but it also increases the risk // of sporadic failures. private final int iterations = 100; private final GenTestCase testCase = GenTestCase.getInstance(); private final float fMin = -Float.MIN_VALUE; private final float fMax = Float.MIN_VALUE; private final double dMin = -Double.MIN_VALUE; private final double dMax = Double.MIN_VALUE; private final String intMin = "10"; private final String intMax = "14"; private final String doubleMin = "" + dMin; private final String doubleMax = "" + dMax; private final String floatMin = "" + fMin; private final String floatMax = "" + fMax; /** When using a singleton inclusive range, the max value is included. */ @Test public void inclInclWithSingleValue() throws Throwable { test(pb() .inclMin("1") .inclMax("1")) .expected(1).run(); } /** When using a singleton exclusive range, the max value is included. */ @Test public void exclExclWithSingleValue() throws Throwable { test(pb() .exclMin("0") .exclMax("2")) .expected(1).run(); } /** When using a singleton mixed range, the max value is included. */ @Test public void inclExclWithSingleValue() throws Throwable { test(pb() .inclMin("1") .exclMax("2")) .expected(1).run(); } /** When using a singleton mixed range, the max value is included. */ @Test public void exclInclWithSingleValue() throws Throwable { test(pb() .exclMin("0") .inclMax("1")) .expected(1).run(); } /** When using a single range, the max allowed value is ignored. */ @Test public void inclInclMaxIgnored() throws Throwable { test(pb() .inclMin(intMin) .inclMax(intMax)) .expected(10,11,12,13).run(); // 14 is missing. } /** When using a single range, the max allowed value is ignored. */ @Test public void exclExclMaxIgnored() throws Throwable { test(pb() .exclMin(intMin) .exclMax(intMax)) .expected(11,12).run(); // 13 is missing. } /** When using a single range, the max allowed value is ignored. */ @Test public void inclExclMaxIgnored() throws Throwable { test(pb() .inclMin(intMin) .exclMax(intMax)) .expected(10,11,12).run(); // 13 is missing. } /** When using a single range, the max allowed value is ignored. */ @Test public void exclInclMaxIgnored() throws Throwable { test(pb() .exclMin(intMin) .inclMax(intMax)) .expected(11,12,13).run(); // 14 is missing. } /** Inclusive range for double includes the limits. */ @Test public void inclusiveDoubleIsInclusive() throws Throwable { test(pb() .type("double") .inclMin(doubleMin) .inclMax(doubleMax)) .expected(dMin, 0.0d, dMax).run(); } /** Exclusive range for double excludes the limits. */ @Test public void exclusiveDoubleIsExclusive() throws Throwable { test(pb() .type("double") .exclMin(doubleMin) .exclMax(doubleMax)) .expected(0.0d).run(); // Limits are indeed exclusive. } /** Inclusive range for float includes the limits. */ @Test public void inclusiveFloatIsInclusive() throws Throwable { test(pb() .type("float") .inclMin(floatMin) .inclMax(floatMax)) .expected(fMin, 0.0f, fMax).run(); } /** Exclusive range for float <strong>includes</strong> the limits. */ @Test public void exclusiveFloatIsInclusive() throws Throwable { test(pb() .type("float") .exclMin(floatMin) .exclMax(floatMax)) .expected(fMin, 0.0f, fMax).run(); // Limits are NOT exclusive. 
} @Test public void overflowingMaxRollsAroundToMin() throws Throwable { test(pb() .inclMin(Integer.MIN_VALUE) .inclMax(((long) Integer.MAX_VALUE) + 3)) .expected(Integer.MIN_VALUE, Integer.MIN_VALUE + 1).run(); } @Test public void underflowingMinRollsAroundToMax() throws Throwable { test(pb() .inclMin(((long) Integer.MIN_VALUE) - 3) .inclMax(Integer.MAX_VALUE)) .expected(Integer.MAX_VALUE - 1, Integer.MAX_VALUE - 2).run(); } @Test public void boolParamWithLimitsIsLegal() throws Throwable { test(pb() .type("boolean") .inclMin("1") .inclMax("3")) .expected(true, false).run(); } private static ParamCfg.Builder pb() { return ParamCfg.builder(); } private GenTestCase test(ParamCfg.Builder pb) throws Throwable { return testCase.gen(Generator.fromParam(pb.build()).limit(iterations)); } }
package jhberges.camel.consul.leader; import java.io.IOException; import java.net.URL; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.camel.CamelContext; import org.apache.camel.ProducerTemplate; import org.apache.camel.impl.DefaultProducerTemplate; import org.apache.camel.support.LifecycleStrategySupport; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.fluent.Executor; import org.apache.http.client.fluent.Request; import org.apache.http.client.fluent.Response; import org.apache.http.entity.ContentType; import org.apache.http.util.EntityUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; public class ConsulLeaderElector extends LifecycleStrategySupport implements Runnable { public static class Builder { public static final Builder forConsulHost(final String url) { return new Builder(url); } private final String consulUrl; private String serviceName; private String routeId; private CamelContext camelContext; private String username; private String password; private ScheduledExecutorService executor; private Builder(final String url) { this.consulUrl = url; } public ConsulLeaderElector build() throws Exception { final ConsulLeaderElector consulLeaderElector = new ConsulLeaderElector( consulUrl, Optional.ofNullable(username), Optional.ofNullable(password), serviceName, routeId, camelContext); executor.scheduleAtFixedRate(consulLeaderElector, 1, POLL_INTERVAL, TimeUnit.SECONDS); camelContext.addLifecycleStrategy(consulLeaderElector); return consulLeaderElector; } public Builder controllingRoute(final String routeId) { this.routeId = routeId; return this; } public Builder inCamelContext(final CamelContext camelContext) { this.camelContext = camelContext; return this; } public Builder usingBasicAuth(final String username, final String password) { this.username = username; this.password = password; return this; } public Builder usingExecutor(final ScheduledExecutorService executor) { this.executor = executor; return this; } public Builder usingServiceName(final String serviceName) { this.serviceName = serviceName; return this; } } private static final int POLL_INTERVAL = 5; private static final String CONTROLBUS_ROUTE = "controlbus:language:simple"; private static final Logger logger = LoggerFactory.getLogger(ConsulLeaderElector.class); private static final ObjectMapper objectMapper = new ObjectMapper(); private static Optional<String> createSession(final Executor executor, final String consulUrl, final String serviceName) { HttpResponse response; try { final String sessionUrl = String.format("%s/v1/session/create", consulUrl); final int ttlByInterval = (int) (POLL_INTERVAL * 1.5); final String sessionBody = String.format("{\"Name\": \"%s\", \"TTL\": \"%ds\"}", serviceName, 10 > ttlByInterval ? 
10 : ttlByInterval); logger.debug("PUT {}\n{}", sessionUrl, sessionBody); response = executor.execute( Request.Put(sessionUrl) .bodyString( sessionBody, ContentType.APPLICATION_JSON)).returnResponse(); if (response.getStatusLine().getStatusCode() == 200) { final Optional<String> newSessionKey = unpackSessionKey(response.getEntity()); logger.info("Consul sessionKey={}", newSessionKey); return newSessionKey; } else { logger.warn("Unable to obtain sessionKey -- will continue as an island: {}", EntityUtils.toString(response.getEntity())); return Optional.empty(); } } catch (final ClientProtocolException e) { logger.warn("Failed to obtain sessionKey \"{}\" -- will continue as an island", e.getMessage()); return Optional.empty(); } catch (final IOException e) { logger.error("Failed to obtain sessionKey \"{}\" -- will continue as an island", e.getMessage()); return Optional.empty(); } } private static void destroySession(final Executor executor, final String consulUrl, final String sessionKey) { logger.info("Destroying consul session {}", sessionKey); try { final HttpResponse response = executor.execute( Request.Put(String.format("%s/v1/session/destroy/%s", consulUrl, sessionKey))).returnResponse(); if (response.getStatusLine().getStatusCode() == 200) { logger.debug("All OK"); } else { logger.warn("Failed to destroy consul session: {}", response.getStatusLine().toString(), EntityUtils.toString(response.getEntity())); } } catch (final IOException e) { logger.error("Failed to destroy consul session: {}", e.getMessage()); } } private static String leaderKey(final String baseUrl, final String serviceName, final String command, final String sessionKey) { return String.format("%s/v1/kv/service/%s/leader?%s=%s", baseUrl, serviceName, command, sessionKey); } private static Optional<Boolean> pollConsul(final Executor executor, final String url, final Optional<String> sessionKey, final String serviceName) { return sessionKey.map(_sessionKey -> { try { final String uri = leaderKey(url, serviceName, "acquire", _sessionKey); logger.debug("PUT {}", uri); final Response response = executor.execute(Request .Put(uri)); final Optional<Boolean> result = Optional.ofNullable(Boolean.valueOf(response.returnContent().asString())); logger.debug("Result: {}", result); return result; } catch (final Exception exception) { logger.warn("Failed to poll consul for leadership: {}", exception.getMessage()); return Optional.<Boolean> empty(); } }).orElse(Optional.empty()); } private static Optional<String> unpackSessionKey(final HttpEntity entity) { try { final Map<String, String> map = objectMapper.readValue(entity.getContent(), new TypeReference<Map<String, String>>() { }); if (Objects.nonNull(map) && map.containsKey("ID")) { return Optional.ofNullable(map.get("ID")); } else { logger.warn("What? 
No \"ID\"?"); } } catch (UnsupportedOperationException | IOException e) { logger.warn("Failed to parse JSON: %s\n %s", entity.toString(), e.getMessage()); } return Optional.empty(); } private final String consulUrl; private final String routeToControl; private final ProducerTemplate producerTemplate; private final String serviceName; private Optional<String> sessionKey = Optional.empty(); private final Executor executor; protected ConsulLeaderElector( final String consulUrl, final Optional<String> username, final Optional<String> password, final String serviceName, final String routeToControl, final CamelContext camelContext) throws Exception { this.consulUrl = consulUrl; this.serviceName = serviceName; this.routeToControl = routeToControl; this.producerTemplate = DefaultProducerTemplate.newInstance(camelContext, CONTROLBUS_ROUTE); this.producerTemplate.start(); this.executor = Executor.newInstance(); if (username.isPresent()) { executor .auth(username.get(), password.get()) .authPreemptive(new HttpHost(new URL(consulUrl).getHost())); } this.sessionKey = getSessionKey(); } private Optional<String> getSessionKey() { if (!sessionKey.isPresent()) { return createSession(executor, consulUrl, serviceName); } else { return sessionKey; } } @Override public void onContextStop(final CamelContext context) { super.onContextStop(context); final Optional<String> sessionKey = getSessionKey(); sessionKey.ifPresent(_sessionKey -> { logger.info("Releasing Consul session"); final String uri = leaderKey(consulUrl, serviceName, "release", _sessionKey); logger.debug("PUT {}", uri); try { final Response response = executor.execute(Request .Put(uri)); final Optional<Boolean> result = Optional.ofNullable(Boolean.valueOf(response.returnContent().asString())); logger.debug("Result: {}", result); destroySession(executor, consulUrl, _sessionKey); } catch (final Exception e) { logger.warn("Failed to release session key in Consul: {}", e); } }); } @Override public void run() { final Optional<Boolean> isLeader = pollConsul(executor, consulUrl, sessionKey, serviceName); try { if (isLeader.orElse(true)) { // I.e if explicitly leader, or poll // failed. logger.info("Starting route={}", routeToControl); producerTemplate.sendBody( CONTROLBUS_ROUTE, String.format("${camelContext.startRoute(\"%s\")}", routeToControl)); } else { logger.info("Stopping route={}", routeToControl); producerTemplate.sendBody( CONTROLBUS_ROUTE, String.format("${camelContext.stopRoute(\"%s\")}", routeToControl)); } } catch (final Exception exc) { logger.error("Exception during route management", exc); } } }
package org.jmeterplugins.repository; import junit.framework.AssertionFailedError; import net.sf.json.*; import org.apache.commons.io.FileUtils; import org.junit.Test; import java.io.File; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.Paths; import java.util.*; public class RepoTest { private final Set<String> cache = new HashSet<>(); private String s = File.separator; private File lib = new File(System.getProperty("project.build.directory", "target") + s + "jars" + s + "lib"); private File libExt = new File(lib.getAbsolutePath() + s + "ext"); public RepoTest() { try { FileUtils.deleteDirectory(lib); } catch (IOException e) { e.printStackTrace(); } libExt.mkdirs(); } @Test public void testAll() throws IOException { Map<String, String> env = System.getenv(); if (env.containsKey("TRAVIS")) { System.out.println("Not running test inside Travis CI"); return; } List<String> problems = new ArrayList<>(); String path = getClass().getProtectionDomain().getCodeSource().getLocation().getFile(); String up = File.separator + ".."; String repos = path + up + up + up + File.separator + "site" + File.separator + "dat" + File.separator + "repo"; File dir = new File(repos); System.out.println("Working with " + dir.getCanonicalPath()); File[] files = dir.listFiles(); assert files != null; for (File f : files) { System.out.println("Checking repo: " + f.getCanonicalPath()); String content = new String(Files.readAllBytes(Paths.get(f.getAbsolutePath())), "UTF-8"); JSON json = JSONSerializer.toJSON(content, new JsonConfig()); JSONArray list = (JSONArray) json; for (Object o : list) { JSONObject spec = (JSONObject) o; checkPlugin(problems, f, spec); } } if (problems.size() > 0) { throw new AssertionFailedError(problems.toString()); } } private void checkPlugin(List<String> problems, File f, JSONObject spec) { Plugin plugin = Plugin.fromJSON(spec); try { System.out.println("Checking plugin: " + plugin); plugin.setCandidateVersion(plugin.getMaxVersion()); plugin.download(dummy); File jar = new File(plugin.getTempName()); File dest = new File(plugin.getDestName()); File to = new File(libExt.getAbsolutePath() + File.separator + dest.getName()); jar.renameTo(to); } catch (Throwable e) { problems.add(f.getName() + ":" + plugin); System.err.println("Problem with " + plugin); e.printStackTrace(System.err); } Map<String, String> libs = plugin.getLibs(plugin.getCandidateVersion()); for (String id : libs.keySet()) { if (!cache.contains(libs.get(id))) { try { Downloader dwn = new Downloader(dummy); String file = dwn.download(id, new URI(libs.get(id))); File jar = new File(file); File dest = new File(lib.getAbsolutePath() + File.separator + dwn.getFilename()); jar.renameTo(dest); cache.add(libs.get(id)); } catch (Throwable e) { problems.add(f.getName() + ":" + plugin + ":" + id); System.err.println("Problem with " + id); e.printStackTrace(System.err); } } } } private GenericCallback<String> dummy = new GenericCallback<String>() { @Override public void notify(String s) { } }; }
package me.stefvanschie.buildinggame.managers.plots; import org.bukkit.Bukkit; import org.bukkit.configuration.file.YamlConfiguration; import me.stefvanschie.buildinggame.managers.arenas.ArenaManager; import me.stefvanschie.buildinggame.managers.files.SettingsManager; import me.stefvanschie.buildinggame.utils.Arena; import me.stefvanschie.buildinggame.utils.plot.Boundary; import me.stefvanschie.buildinggame.utils.plot.Plot; public class BoundaryManager { private BoundaryManager() {} private static BoundaryManager instance = new BoundaryManager(); public static BoundaryManager getInstance() { return instance; } public void setup() { for (Arena arena : ArenaManager.getInstance().getArenas()) { for (Plot plot : arena.getPlots()) { try { YamlConfiguration arenas = SettingsManager.getInstance().getArenas(); plot.setBoundary(new Boundary(Bukkit.getWorld(arenas.getString(arena.getName() + "." + plot.getID() + ".high.world")), arenas.getInt(arena.getName() + "." + plot.getID() + ".high.x"), arenas.getInt(arena.getName() + "." + plot.getID() + ".high.y"), arenas.getInt(arena.getName() + "." + plot.getID() + ".high.z"), arenas.getInt(arena.getName() + "." + plot.getID() + ".low.x"), arenas.getInt(arena.getName() + "." + plot.getID() + ".low.y"), arenas.getInt(arena.getName() + "." + plot.getID() + ".low.z"))); } catch (NullPointerException e) { plot.setBoundary(null); } catch (IllegalArgumentException iae) { plot.setBoundary(null); } } } } }
package net.gtaun.wl.common.dialog; import net.gtaun.shoebill.object.Player; import net.gtaun.util.event.Event; import net.gtaun.util.event.EventManager; public abstract class ListDialogExtendEvent extends Event { protected final Player player; protected final EventManager eventManager; protected final WlListDialog dialog; public ListDialogExtendEvent(Player player, EventManager eventManager, WlListDialog dialog) { this.player = player; this.eventManager = eventManager; this.dialog = dialog; } public Player getPlayer() { return player; } public EventManager getEventManager() { return eventManager; } public WlListDialog getDialog() { return dialog; } }
package net.mcft.copy.betterstorage.item; import java.util.List; import net.mcft.copy.betterstorage.misc.ChristmasEventHandler; import net.mcft.copy.betterstorage.misc.Constants; import net.mcft.copy.betterstorage.utils.MiscUtils; import net.mcft.copy.betterstorage.utils.NbtUtils; import net.mcft.copy.betterstorage.utils.StackUtils; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.GuiScreenBook; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.Items; import net.minecraft.item.EnumRarity; import net.minecraft.item.ItemEditableBook; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagList; import net.minecraft.util.EnumChatFormatting; import net.minecraft.world.World; import cpw.mods.fml.common.registry.GameRegistry; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; public class ItemPresentBook extends ItemEditableBook { public ItemPresentBook() { setMaxStackSize(1); setCreativeTab(null); String name = MiscUtils.getName(this); setUnlocalizedName(Constants.modId + "." + name); setTextureName(Constants.modId + ":" + name); GameRegistry.registerItem(this, name); } @Override @SideOnly(Side.CLIENT) public boolean hasEffect(ItemStack stack) { return true; } @Override public EnumRarity getRarity(ItemStack stack) { return EnumRarity.rare; } @Override public String getItemStackDisplayName(ItemStack stack) { return "BetterChristmas " + StackUtils.get(stack, 9001, "year"); } @Override @SideOnly(Side.CLIENT) public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean advancedTooltips) { list.add(EnumChatFormatting.DARK_GRAY + "" + EnumChatFormatting.ITALIC + "An instructionary tale..?"); list.add("for " + StackUtils.get(stack, "[undefined]", "name")); } @Override public ItemStack onItemRightClick(ItemStack stack, World world, EntityPlayer player) { if (!world.isRemote || !ChristmasEventHandler.isBeforeChristmas() || (StackUtils.get(stack, 9001, "year") != ChristmasEventHandler.getYear())) return stack; int days = (24 - ChristmasEventHandler.getDay()); String r = EnumChatFormatting.RESET.toString(); String b = EnumChatFormatting.BOLD.toString(); String i = EnumChatFormatting.ITALIC.toString(); String u = EnumChatFormatting.UNDERLINE.toString(); String red = EnumChatFormatting.RED.toString(); String blue = EnumChatFormatting.BLUE.toString(); String black = EnumChatFormatting.BLACK.toString(); String darkRed = EnumChatFormatting.DARK_RED.toString(); String darkAqua = EnumChatFormatting.DARK_AQUA.toString(); String darkGray = EnumChatFormatting.DARK_GRAY.toString(); NBTTagList pages = NbtUtils.createList( " " + darkAqua + b + u + "BetterChristmas" + r + "\n by copygirl" + black + "\n\n" + "Dear " + darkRed + StackUtils.get(stack, "[undefined]", "name") + black + ",\n" + "it's only " + days + " more day" + ((days != 1) ? "s" : "") + " until Christmas. " + "I bet you're all excited to get some nifty presents IRL.\n\n" + "But you know what's even better than real presents?", darkRed + b + " Virtual presents!!\n" + " ~~~~~~~~~~~~~\n" + black + "That's right, you can now make presents in BetterStorage! " + "If you didn't know already, that is.\n\n" + "Additionally, if you hold on to this book, you can " + darkRed + i + "get a present" + black + " this Christmas! 
Read on to learn more.", darkRed + " [ " + b + "Spirit of Giving" + darkRed + " ]" + black + "\n\n" + "How to make present:\n" + "- Fill cardboard box\n" + "- Surround with wool\n" + "You may choose any two types of wool.\n\n" + " " + u + "Crafting Recipe:\n\n" + red+b + " #" + blue+b + "#" + red+b + "# " + "#" + blue+b + "#" + red+b + "#\n" + " #" + darkRed+b + "O" + red+b + "# " + blue+b + "#" + darkRed+b + "O" + blue+b + "# " + darkGray+b + "#" + black + " = Wool\n" + red+b + " #" + blue+b + "#" + red+b + "# " + "#" + blue+b + "#" + red+b + "# " + darkRed+b + "O" + black + " = Box", darkRed + " [ " + b + "Gift Protection" + darkRed + " ]" + black + "\n\n" + "When it's time to open the presents, it can get quite chaotic.\n" + darkRed + i + "Name tags" + black + " with the recipient's name will ensure only they can open their presents.\n\n" + "You can remove name tags in a " + darkRed + i + "crafting station" + black + " using shears.", darkRed + "[ " + b + "Free Free Free!" + darkRed + " ]" + black + "\n\n" + "Don't lose this book, don't let it fall into the enemies' (nor your friends') hands.\n" + "Hold it and log in on Christmas (24-26 Dec), and you'll get your very own present!\n\n" + "This only works with your own book." ); showBookScreen(player, pages); return stack; } @SideOnly(Side.CLIENT) public static void showBookScreen(EntityPlayer player, NBTTagList pages) { ItemStack stack = new ItemStack(Items.written_book); StackUtils.set(stack, pages, "pages"); Minecraft.getMinecraft().displayGuiScreen(new GuiScreenBook(player, stack, false)); } }
/* @java.file.header */ package org.gridgain.grid.util.typedef; import org.gridgain.grid.*; import org.gridgain.grid.util.*; import org.gridgain.grid.util.typedef.internal.*; import org.jetbrains.annotations.*; import java.io.*; import java.lang.reflect.Array; import java.lang.reflect.*; import java.sql.*; import java.util.*; /** * Defines global scope. * <p> * Contains often used utility functions allowing to cut down on code bloat. This * is somewhat analogous to {@code Predef} in Scala. Note that this should only be used * when this typedef <b>does not sacrifice</b> the code readability. */ public final class X { /** An empty immutable <code>Object</code> array. */ public static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; /** Time span dividers. */ private static final long[] SPAN_DIVS = new long[] {1000L, 60L, 60L, 60L}; /** The names of methods commonly used to access a wrapped exception. */ private static final String[] CAUSE_MTD_NAMES = new String[] { "getCause", "getNextException", "getTargetException", "getException", "getSourceException", "getRootCause", "getCausedByException", "getNested", "getLinkedException", "getNestedException", "getLinkedCause", "getThrowable" }; /** The Method object for Java 1.4 getCause. */ private static final Method THROWABLE_CAUSE_METHOD; static { Method causeMtd; try { causeMtd = Throwable.class.getMethod("getCause", null); } catch (Exception ignored) { causeMtd = null; } THROWABLE_CAUSE_METHOD = causeMtd; } /** * Ensures singleton. */ private X() { // No-op. } /** * Alias for {@code System.out.println()}. */ public static void println() { System.out.println(); } /** * Alias for {@code System.err.println()}. */ public static void printerrln() { System.err.println(); } /** * Alias for {@code System.out.println}. * * @param s1 First string to print. * @param rest Optional list of objects to print as well. */ public static void println(@Nullable String s1, @Nullable Object... rest) { System.out.println(s1); if (rest != null && rest.length > 0) for (Object obj : rest) System.out.println(obj); } /** * Alias for {@code System.err.println}. * * @param s1 First string to print. * @param rest Optional list of objects to print as well. */ public static void printerrln(@Nullable String s1, @Nullable Object... rest) { error(s1, rest); } /** * Alias for {@code System.err.println}. * * @param s1 First string to print. * @param rest Optional list of objects to print as well. */ public static void error(@Nullable String s1, @Nullable Object... rest) { System.err.println(s1); if (rest != null && rest.length > 0) for (Object obj : rest) System.err.println(obj); } /** * Alias for {@code System.out.print}. * * @param s1 First string to print. * @param rest Optional list of objects to print as well. */ public static void print(@Nullable String s1, @Nullable Object... rest) { System.out.print(s1); if (rest != null && rest.length > 0) for (Object obj : rest) System.out.print(obj); } /** * Alias for {@code System.err.print}. * * @param s1 First string to print. * @param rest Optional list of objects to print as well. */ public static void printerr(@Nullable String s1, @Nullable Object... rest) { System.err.print(s1); if (rest != null && rest.length > 0) for (Object obj : rest) System.err.print(obj); } /** * Gets either system property or environment variable with given name. * * @param name Name of the system property or environment variable. * @return Value of the system property or environment variable. Returns * {@code null} if neither can be found for given name. 
*/ @Nullable public static String getSystemOrEnv(String name) { assert name != null; String v = System.getProperty(name); if (v == null) v = System.getenv(name); return v; } /** * Gets either system property or environment variable with given name. * * @param name Name of the system property or environment variable. * @param dflt Default value. * @return Value of the system property or environment variable. Returns * {@code null} if neither can be found for given name. */ @Nullable public static String getSystemOrEnv(String name, String dflt) { assert name != null; String v = getSystemOrEnv(name); return F.isEmpty(v) ? dflt : v; } /** * Creates string presentation of given time {@code span} in hh:mm:ss:msec {@code HMSM} format. * * @param span Time span. * @return String presentation. */ public static String timeSpan2HMSM(long span) { long[] t = new long[4]; long sp = span; for (int i = 0; i < SPAN_DIVS.length && sp > 0; sp /= SPAN_DIVS[i++]) t[i] = sp % SPAN_DIVS[i]; return (t[3] < 10 ? "0" + t[3] : Long.toString(t[3])) + ':' + (t[2] < 10 ? "0" + t[2] : Long.toString(t[2])) + ':' + (t[1] < 10 ? "0" + t[1] : Long.toString(t[1])) + ':' + (t[0] < 10 ? "0" + t[0] : Long.toString(t[0])); } /** * Creates string presentation of given time {@code span} in hh:mm:ss {@code HMS} format. * * @param span Time span. * @return String presentation. */ public static String timeSpan2HMS(long span) { long[] t = new long[4]; long sp = span; for (int i = 0; i < SPAN_DIVS.length && sp > 0; sp /= SPAN_DIVS[i++]) t[i] = sp % SPAN_DIVS[i]; return (t[3] < 10 ? "0" + t[3] : Long.toString(t[3])) + ':' + (t[2] < 10 ? "0" + t[2] : Long.toString(t[2])) + ':' + (t[1] < 10 ? "0" + t[1] : Long.toString(t[1])); } /** * Clones a passed in object. If parameter {@code deep} is set to {@code true} * then this method will use deep cloning algorithm based on deep reflection * ignoring {@link Cloneable} interface unless parameter {@code honorCloneable} * is set to false. * <p> * If {@code deep} is {@code false} then this method will check the object for * {@link Cloneable} interface and use {@link Object#clone()} to make a copy, * otherwise the object itself will be returned. * * @param obj Object to create a clone from. * @param deep {@code true} to use algorithm of deep cloning. If {@code false} * then this method will always be checking whether a passed in object * implements interface {@link Cloneable} and if it does then method * {@link Object#clone()} will be used to clone object, if does not * then the object itself will be returned. * @param honorCloneable Flag indicating whether {@link Cloneable} interface * should be honored or not when cloning. This parameter makes sense only if * parameter {@code deep} is set to {@code true}. * @param <T> Type of cloning object. * @return Copy of a passed in object. */ @SuppressWarnings({"unchecked"}) @Nullable public static <T> T cloneObject(@Nullable T obj, boolean deep, boolean honorCloneable) { if (obj == null) return null; try { return !deep ? shallowClone(obj) : (T)deepClone(new GridLeanMap<Integer, Integer>(), new ArrayList<>(), obj, honorCloneable); } catch (Throwable e) { throw new GridRuntimeException("Unable to clone instance of class: " + obj.getClass(), e); } } /** * @param obj Object to make a clone for. * @param <T> Type of cloning object. * @return Copy of a passed in object. 
*/ @SuppressWarnings({"unchecked"}) @Nullable private static <T> T shallowClone(@Nullable T obj) { if (obj == null) return null; if (!(obj instanceof Cloneable)) return obj; if (obj.getClass().isArray()) return obj instanceof byte[] ? (T)(((byte[])obj).clone()) : obj instanceof short[] ? (T)(((short[])obj).clone()) : obj instanceof char[] ? (T)(((char[])obj).clone()) : obj instanceof int[] ? (T)(((int[])obj).clone()) : obj instanceof long[] ? (T)(((long[])obj).clone()) : obj instanceof float[] ? (T)(((float[])obj).clone()) : obj instanceof double[] ? (T)(((double[])obj).clone()) : obj instanceof boolean[] ? (T)(((boolean[])obj).clone()) : (T)(((Object[])obj).clone()); try { // 'getDeclaredMethods' searches for ALL methods, 'getMethods' - only public methods. Method mtd = obj.getClass().getDeclaredMethod("clone"); boolean set = false; if (!mtd.isAccessible()) mtd.setAccessible(set = true); T clone = (T)mtd.invoke(obj); if (set) mtd.setAccessible(false); return clone; } catch (Exception e) { throw new GridRuntimeException("Unable to clone instance of class: " + obj.getClass(), e); } } /** * Recursively clones the object. * * @param identityIdxs Map of object identities to indexes in {@code clones} parameter. * @param clones List of already cloned objects. * @param obj The object to deep-clone. * @param honorCloneable {@code true} if method should account {@link Cloneable} interface. * @return Clone of the input object. * @throws Exception If deep-cloning fails. */ @Nullable private static Object deepClone(Map<Integer, Integer> identityIdxs, List<Object> clones, @Nullable Object obj, boolean honorCloneable) throws Exception { if (obj == null) return null; if (honorCloneable && obj instanceof Cloneable) return shallowClone(obj); Integer idx = identityIdxs.get(System.identityHashCode(obj)); Object clone = null; if (idx != null) clone = clones.get(idx); if (clone != null) return clone; if (obj instanceof Class) // No clone needed for java.lang.Class instance. return obj; Class cls = obj.getClass(); if (cls.isArray()) { Class<?> arrType = cls.getComponentType(); int len = Array.getLength(obj); clone = Array.newInstance(arrType, len); for (int i = 0; i < len; i++) Array.set(clone, i, deepClone(identityIdxs, clones, Array.get(obj, i), honorCloneable)); clones.add(clone); identityIdxs.put(System.identityHashCode(obj), clones.size() - 1); return clone; } clone = U.forceNewInstance(cls); if (clone == null) throw new GridRuntimeException("Failed to clone object (empty constructor could not be assigned): " + obj); clones.add(clone); identityIdxs.put(System.identityHashCode(obj), clones.size() - 1); for (Class<?> c = cls; c != Object.class; c = c.getSuperclass()) for (Field f : c.getDeclaredFields()) cloneField(identityIdxs, clones, obj, clone, f, honorCloneable); return clone; } /** * @param identityIdxs Map of object identities to indexes in {@code clones} parameter. * @param clones List of already cloned objects. * @param obj Object to clone. * @param clone Clone. * @param f Field to clone. * @param honorCloneable {@code true} if method should account {@link Cloneable} interface. * @throws Exception If failed. */ private static void cloneField(Map<Integer, Integer> identityIdxs, List<Object> clones, Object obj, Object clone, Field f, boolean honorCloneable) throws Exception { int modifiers = f.getModifiers(); // Skip over static fields. 
if (Modifier.isStatic(modifiers)) return; boolean set = false; if (!f.isAccessible()) { f.setAccessible(true); set = true; } try { if (f.getType().isPrimitive()) f.set(clone, f.get(obj)); else f.set(clone, deepClone(identityIdxs, clones, f.get(obj), honorCloneable)); } finally { if (set) f.setAccessible(false); } } /** * Checks if passed in {@code 'Throwable'} has given class in {@code 'cause'} hierarchy <b>including</b> that * throwable itself. <p> Note that this method follows includes {@link Throwable#getSuppressed()} into check. * * @param t Throwable to check (if {@code null}, {@code false} is returned). * @param cls Cause classes to check (if {@code null} or empty, {@code false} is returned). * @return {@code True} if one of the causing exception is an instance of passed in classes, {@code false} * otherwise. */ public static boolean hasCause(@Nullable Throwable t, @Nullable Class<? extends Throwable>... cls) { if (t == null || F.isEmpty(cls)) return false; assert cls != null; for (Throwable th = t; th != null; th = th.getCause()) { for (Class<? extends Throwable> c : cls) if (c.isAssignableFrom(th.getClass())) return true; for (Throwable n : th.getSuppressed()) if (hasCause(n, cls)) return true; if (th.getCause() == th) break; } return false; } /** * Checks if passed in {@code 'Throwable'} has given class in {@code 'cause'} hierarchy <b>excluding</b> that * throwable itself. <p> Note that this method follows includes {@link Throwable#getSuppressed()} into check. * * @param t Throwable to check (if {@code null}, {@code false} is returned). * @param cls Cause classes to check (if {@code null} or empty, {@code false} is returned). * @return {@code True} if one of the causing exception is an instance of passed in classes, {@code false} * otherwise. */ public static boolean hasCauseExcludeRoot(@Nullable Throwable t, @Nullable Class<? extends Throwable>... cls) { if (t == null || F.isEmpty(cls)) return false; assert cls != null; for (Throwable th = t.getCause(); th != null; th = th.getCause()) { for (Class<? extends Throwable> c : cls) if (c.isAssignableFrom(th.getClass())) return true; if (th.getCause() == th) break; } for (Throwable n : t.getSuppressed()) if (hasCause(n, cls)) return true; return false; } /** * Gets first cause if passed in {@code 'Throwable'} has given class in {@code 'cause'} hierarchy. * * Note that this method follows includes {@link Throwable#getSuppressed()} into check. * * @param t Throwable to check (if {@code null}, {@code null} is returned). * @param cls Cause class to get cause (if {@code null}, {@code null} is returned). * @return First causing exception of passed in class, {@code null} otherwise. */ @SuppressWarnings({"unchecked"}) @Nullable public static <T extends Throwable> T cause(@Nullable Throwable t, @Nullable Class<T> cls) { if (t == null || cls == null) return null; for (Throwable th = t; th != null; th = th.getCause()) { if (cls.isAssignableFrom(th.getClass())) return (T)th; for (Throwable n : th.getSuppressed()) { T found = cause(n, cls); if (found != null) return found; } if (th.getCause() == th) break; } return null; } /** * Finds a <code>Throwable</code> for known types. 
* * <p>Uses <code>instanceof</code> checks to examine the exception, looking for well known types which could contain * chained or wrapped exceptions.</p> * * @param throwable the exception to examine * @return the wrapped exception, or <code>null</code> if not found */ private static Throwable getCauseUsingWellKnownTypes(Throwable throwable) { if (throwable instanceof SQLException) { return ((SQLException)throwable).getNextException(); } else if (throwable instanceof InvocationTargetException) { return ((InvocationTargetException)throwable).getTargetException(); } else { return null; } } /** * Finds a <code>Throwable</code> by method name. * * @param throwable the exception to examine * @param mtdName the name of the method to find and invoke * @return the wrapped exception, or <code>null</code> if not found */ private static Throwable getCauseUsingMethodName(Throwable throwable, String mtdName) { Method mtd = null; try { mtd = throwable.getClass().getMethod(mtdName, null); } catch (NoSuchMethodException | SecurityException ignored) { // exception ignored } if (mtd != null && Throwable.class.isAssignableFrom(mtd.getReturnType())) { try { return (Throwable)mtd.invoke(throwable, EMPTY_OBJECT_ARRAY); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ignored) { // exception ignored } } return null; } /** * Finds a <code>Throwable</code> by field name. * * @param throwable the exception to examine * @param fieldName the name of the attribute to examine * @return the wrapped exception, or <code>null</code> if not found */ private static Throwable getCauseUsingFieldName(Throwable throwable, String fieldName) { Field field = null; try { field = throwable.getClass().getField(fieldName); } catch (NoSuchFieldException | SecurityException ignored) { // exception ignored } if (field != null && Throwable.class.isAssignableFrom(field.getType())) { try { return (Throwable)field.get(throwable); } catch (IllegalAccessException | IllegalArgumentException ignored) { // exception ignored } } return null; } /** * Checks if the Throwable class has a <code>getCause</code> method. * * This is true for JDK 1.4 and above. * * @return true if Throwable is nestable. */ public static boolean isThrowableNested() { return THROWABLE_CAUSE_METHOD != null; } /** * Checks whether this <code>Throwable</code> class can store a cause. * * This method does <b>not</b> check whether it actually does store a cause. * * @param throwable the <code>Throwable</code> to examine, may be null. * @return boolean <code>true</code> if nested otherwise <code>false</code>. */ public static boolean isNestedThrowable(Throwable throwable) { if (throwable == null) return false; if (throwable instanceof SQLException || throwable instanceof InvocationTargetException) return true; if (isThrowableNested()) return true; Class<?> cls = throwable.getClass(); for (String CAUSE_MTD_NAME : CAUSE_MTD_NAMES) { try { Method mtd = cls.getMethod(CAUSE_MTD_NAME, null); if (mtd != null && Throwable.class.isAssignableFrom(mtd.getReturnType())) { return true; } } catch (NoSuchMethodException | SecurityException ignored) { // exception ignored } } try { Field field = cls.getField("detail"); if (field != null) return true; } catch (NoSuchFieldException | SecurityException ignored) { // exception ignored } return false; } /** * Introspects the <code>Throwable</code> to obtain the cause. * * The method searches for methods with specific names that return a <code>Throwable</code> object. 
* This will pick up most wrapping exceptions, including those from JDK 1.4. * * The default list searched for are:</p> <ul> <li><code>getCause()</code></li> * <li><code>getNextException()</code></li> <li><code>getTargetException()</code></li> * <li><code>getException()</code></li> <li><code>getSourceException()</code></li> * <li><code>getRootCause()</code></li> <li><code>getCausedByException()</code></li> * <li><code>getNested()</code></li> </ul> * * <p>In the absence of any such method, the object is inspected for a <code>detail</code> * field assignable to a <code>Throwable</code>.</p> * * <p>If none of the above is found, returns <code>null</code>. * * @param throwable the throwable to introspect for a cause, may be null. * @return the cause of the <code>Throwable</code>, * <code>null</code> if none found or null throwable input. */ public static Throwable getCause(Throwable throwable) { return getCause(throwable, CAUSE_MTD_NAMES); } /** * Introspects the <code>Throwable</code> to obtain the cause. * * <ol> <li>Try known exception types.</li> <li>Try the supplied array of method names.</li> <li>Try the field * 'detail'.</li> </ol> * * <p>A <code>null</code> set of method names means use the default set. A <code>null</code> in the set of method * names will be ignored.</p> * * @param throwable the throwable to introspect for a cause, may be null. * @param mtdNames the method names, null treated as default set. * @return the cause of the <code>Throwable</code>, <code>null</code> if none found or null throwable input. */ public static Throwable getCause(Throwable throwable, String[] mtdNames) { if (throwable == null) return null; Throwable cause = getCauseUsingWellKnownTypes(throwable); if (cause == null) { if (mtdNames == null) mtdNames = CAUSE_MTD_NAMES; for (String mtdName : mtdNames) { if (mtdName != null) { cause = getCauseUsingMethodName(throwable, mtdName); if (cause != null) break; } } if (cause == null) cause = getCauseUsingFieldName(throwable, "detail"); } return cause; } /** * Returns the list of <code>Throwable</code> objects in the exception chain. * * <p>A throwable without cause will return a list containing one element - the input throwable. A throwable with * one cause will return a list containing two elements. - the input throwable and the cause throwable. A * <code>null</code> throwable will return a list of size zero.</p> * * <p>This method handles recursive cause structures that might otherwise cause infinite loops. The cause chain is * processed until the end is reached, or until the next item in the chain is already in the result set.</p> * * @param throwable the throwable to inspect, may be null * @return the list of throwables, never null */ public static List<Throwable> getThrowableList(Throwable throwable) { List<Throwable> list = new ArrayList<>(); while (throwable != null && !list.contains(throwable)) { list.add(throwable); throwable = getCause(throwable); } return list; } /** * Returns the list of <code>Throwable</code> objects in the exception chain. * * <p>A throwable without cause will return an array containing one element - the input throwable. A throwable with * one cause will return an array containing two elements. - the input throwable and the cause throwable. A * <code>null</code> throwable will return an array of size zero.</p> * * <p>From version 2.2, this method handles recursive cause structures that might otherwise cause infinite loops. 
* The cause chain is processed until the end is reached, or until the next item in the chain is already in the * result set.</p> * * @param throwable the throwable to inspect, may be null * @return the array of throwables, never null * @see #getThrowableList(Throwable) */ public static Throwable[] getThrowables(Throwable throwable) { List<Throwable> list = getThrowableList(throwable); return list.toArray(new Throwable[list.size()]); } /** * A way to get the entire nested stack-trace of an throwable. * * <p>The result of this method is highly dependent on the JDK version and whether the exceptions override * printStackTrace or not.</p> * * @param throwable the <code>Throwable</code> to be examined * @return the nested stack trace, with the root cause first */ public static String getFullStackTrace(Throwable throwable) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw, true); Throwable[] ts = getThrowables(throwable); for (Throwable t : ts) { t.printStackTrace(pw); if (isNestedThrowable(t)) break; } return sw.getBuffer().toString(); } /** * Synchronously waits for all futures in the collection. * * @param futs Futures to wait for. * @throws GridException If any of the futures threw exception. */ public static void waitAll(@Nullable Iterable<GridFuture<?>> futs) throws GridException { if (F.isEmpty(futs)) return; for (GridFuture fut : futs) fut.get(); } /** * Pretty-formatting for minutes. * * @param mins Minutes to format. * @return Formatted presentation of minutes. */ public static String formatMins(long mins) { assert mins >= 0; if (mins == 0) return "< 1 min"; SB sb = new SB(); long dd = mins / 1440; // 1440 mins = 60 mins * 24 hours if (dd > 0) sb.a(dd).a(dd == 1 ? " day " : " days "); mins %= 1440; long hh = mins / 60; if (hh > 0) sb.a(hh).a(hh == 1 ? " hour " : " hours "); mins %= 60; if (mins > 0) sb.a(mins).a(mins == 1 ? " min " : " mins "); return sb.toString().trim(); } /** * Exits with code {@code -1} if maximum memory is below 90% of minimally allowed threshold. * * @param min Minimum memory threshold. */ public static void checkMinMemory(long min) { long maxMem = Runtime.getRuntime().maxMemory(); if (maxMem < .85 * min) { printerrln("Heap limit is too low (" + (maxMem / (1024 * 1024)) + "MB), please increase heap size at least up to " + (min / (1024 * 1024)) + "MB."); System.exit(-1); } } /** * Copies input byte stream to output byte stream. * * @param in Input byte stream. * @param out Output byte stream. * @param bufSize Intermediate buffer size. * @return Number of the copied bytes. * @throws IOException Thrown if an I/O error occurs. */ public static int copy(InputStream in, OutputStream out, int bufSize) throws IOException { byte[] buf = new byte[bufSize]; int cnt = 0; for (int n; (n = in.read(buf)) > 0;) { out.write(buf, 0, n); cnt += n; } return cnt; } /** * Tries to resolve GridGain installation home folder. * * @return Installation home folder. * @throws GridException If GridGain home folder was not set. */ public static String resolveGridGainHome() throws GridException { String var = getSystemOrEnv("GRIDGAIN_HOME"); if (var != null) return var; else throw new GridException("Failed to resolve GridGain home folder " + "(please set 'GRIDGAIN_HOME' environment or system variable)"); } /** * Parses double from possibly {@code null} or invalid string. * * @param s String to parse double from. If string is null or invalid, a default value is used. * @param dflt Default value for double, if parsing failed. * @return Resulting double. 
*/ public static double parseDouble(@Nullable String s, double dflt) { try { return s != null ? Double.parseDouble(s) : dflt; } catch (NumberFormatException ignored) { return dflt; } } }
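/*
 * Usage sketch (not part of the original sources): a few of the X helpers defined above applied to
 * an artificial exception chain. Only methods declared in X itself are used; the SQLException
 * wrapping an IllegalStateException is just an example payload.
 */
package org.gridgain.grid.util.typedef;

import java.sql.SQLException;

public class XUsageExample {
    public static void main(String[] args) {
        Exception chained = new SQLException("outer", new IllegalStateException("root"));

        // Cause-hierarchy helpers (both also walk suppressed exceptions).
        X.println("has IllegalStateException: " + X.hasCause(chained, IllegalStateException.class));
        IllegalStateException root = X.cause(chained, IllegalStateException.class);
        X.println("root message: " + (root != null ? root.getMessage() : "n/a"));

        // Full nested stack trace, root cause included.
        X.print(X.getFullStackTrace(chained));

        // Formatting and parsing helpers.
        X.println("uptime: " + X.timeSpan2HMS(5 * 60 * 60 * 1000L + 42 * 60 * 1000L));
        X.println("minutes: " + X.formatMins(1500));
        X.println("parsed: " + X.parseDouble("not-a-number", 1.5));
    }
}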
package eskimo.invoker.utils; import eskimo.invoker.config.InvokerSettings; import eskimo.invoker.entity.ExecutionResult; import eskimo.invoker.services.ExecuteServiceWindows; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.TimeUnit; @Component public class InvokerUtils { private static final Logger logger = LoggerFactory.getLogger(ExecuteServiceWindows.class); @Autowired private InvokerSettings settings; @Autowired private InvokerUtils invokerUtils; public File createTempFolder() throws IOException { File temp = settings.getRunnerTempPath(); temp.mkdirs(); return Files.createTempDirectory(temp.toPath(), "invoker-").toFile(); } public File createRunnerTempFolder(String prefix) throws IOException { File temp = settings.getRunnerTempPath(); temp.mkdirs(); return Files.createTempDirectory(temp.toPath(), prefix).toFile(); } public ExecutionResult executeCommand(List<String> commands, long timeLimit) throws IOException, InterruptedException { return executeCommand(commands, timeLimit, null, null, null); } public ExecutionResult executeCommand(List<String> commands, long timeLimit, File redirectInput, File redirectOutput, File redirectError) throws IOException, InterruptedException { return executeCommand(commands.toArray(new String[0]), timeLimit, redirectInput, redirectOutput, redirectError); } public ExecutionResult executeCommand(String[] commands, long timeLimit, File redirectInput, File redirectOutput, File redirectError) throws IOException, InterruptedException { File folder = null; try { logger.info("execute command: " + Arrays.toString(commands)); ProcessBuilder pb = new ProcessBuilder(commands); folder = invokerUtils.createTempFolder(); if (redirectInput != null) { pb.redirectInput(redirectInput); } if (redirectOutput == null) { redirectOutput = new File(folder.getAbsolutePath() + File.separator + "stdout.txt"); } if (redirectError == null) { redirectError = new File(folder.getAbsolutePath() + File.separator + "stderr.txt"); } pb.redirectOutput(redirectOutput); pb.redirectError(redirectError); Process process = pb.start(); boolean timeOutExceeded = false; ExecutionResult result = new ExecutionResult(); if (timeLimit == 0) process.waitFor(); else { timeOutExceeded = !process.waitFor(timeLimit, TimeUnit.MILLISECONDS); } if (timeOutExceeded) { process.destroy(); } else { result.setExitCode(process.exitValue()); } if (redirectOutput.exists()) { result.setStdout(FileUtils.readFileToString(redirectOutput)); } if (redirectError.exists()) { result.setStderr(FileUtils.readFileToString(redirectError)); } result.setTimeOutExceeded(timeOutExceeded); return result; } finally { try { FileUtils.deleteDirectory(folder); } catch (IOException e) { logger.error("Can't delete directory: " + folder.getAbsolutePath(), e); } } } private String readInputStream(InputStream is) { if (is == null) { return null; } BufferedInputStream bis = new BufferedInputStream(is); try { return IOUtils.toString(bis); } catch (IOException e) { logger.warn("Can't read input stream", e); return null; } } public ExecutionResult executeRunner(List<String> programCommand, File input, File output, 
File stderr, File stat, long timeLimit, long memoryLimit, File workingFolder, boolean allowCreateProcesses) throws IOException, InterruptedException { File runner = prepareRunner(); List<String> command = new ArrayList<>(); command.add(runner.getAbsolutePath()); command.add("-t"); command.add(timeLimit + "ms"); command.add("-m"); command.add(memoryLimit + "K"); command.add("-y"); command.add("10"); command.add("-d"); command.add(workingFolder.getAbsolutePath()); command.add("-x"); if (input != null) { command.add("-i"); command.add(input.getAbsolutePath()); } if (output != null) { command.add("-o"); command.add(output.getAbsolutePath()); } if (stderr != null) { command.add("-e"); command.add(stderr.getAbsolutePath()); } if (stat != null) { command.add("-s"); command.add(stat.getAbsolutePath()); } if (allowCreateProcesses) { command.add("--allow-create-processes"); } command.addAll(programCommand); return executeCommand(command, 60000); } private File prepareRunner() throws IOException { String mode = "x64"; try { File folder = new File(settings.getStoragePath().getAbsolutePath() + File.separator + "runner" + File.separator + mode); folder.mkdirs(); File runner = new File(folder.getAbsoluteFile() + "/run.exe"); File dll = new File(folder.getAbsolutePath() + "/invoke2.dll"); if (!runner.exists()) { try (InputStream is = getClass().getClassLoader().getResourceAsStream("runner/" + mode + "/run.exe")) { FileUtils.copyInputStreamToFile(is, runner); } } if (!dll.exists()) { try (InputStream is = getClass().getClassLoader().getResourceAsStream("runner/" + mode + "/invoke2.exe")) { FileUtils.copyInputStreamToFile(is, dll); } } return runner; } catch (IOException e) { logger.error("Can't prepare runner " + mode, e); throw e; } } }
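A minimal usage sketch for InvokerUtils follows (not part of the original sources). The ExecutionResult getter names (isTimeOutExceeded, getExitCode, getStderr) are assumed from the setters used above and may differ in the real entity class.

package eskimo.invoker.utils;

import eskimo.invoker.entity.ExecutionResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.Arrays;

// Hypothetical example component, not part of the original project.
@Component
public class InvokerUtilsUsageExample {

    @Autowired
    private InvokerUtils invokerUtils;

    public void compileSolution() throws IOException, InterruptedException {
        // Run "javac Solution.java" with a 10 second wall-clock limit; stdout/stderr
        // are redirected into a temporary folder that executeCommand deletes afterwards.
        ExecutionResult result = invokerUtils.executeCommand(Arrays.asList("javac", "Solution.java"), 10000);
        if (result.isTimeOutExceeded()) { // getter assumed from setTimeOutExceeded(...)
            System.err.println("Compilation exceeded the time limit");
        } else {
            System.out.println("Exit code: " + result.getExitCode());       // assumed from setExitCode(...)
            System.out.println("Compiler output: " + result.getStderr());   // assumed from setStderr(...)
        }
    }
}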
package czsem.fs.query; import java.util.List; import czsem.fs.FSTokenizer; public class FSQueryParser { protected List<Character> chars; protected List<String> strings; protected int charIndex = 0; protected int stringIndex = 0; protected FSQueryBuilder builder; public FSQueryParser(FSQueryBuilder builder) { this.builder = builder; } public static class SyntaxError extends Exception { public SyntaxError(String message) { super(message); } private static final long serialVersionUID = 595782365757384397L; } public void parse(String input) throws SyntaxError { FSTokenizer tokenizer = new FSTokenizer(input); chars = tokenizer.getCharList(); strings = tokenizer.getStringList(); parseNode(); } protected void parseNode() throws SyntaxError { expectChar('['); builder.addNode(); parseRestrictions(); expectChar(']'); if (moreCharsAvailable() && nextCharIs('(')) { parseChildren(); } } protected void parseChildren() throws SyntaxError { expectChar('('); builder.beginChildren(); for (;;) { parseNode(); if (! nextCharIs(',')) break; expectChar(','); } expectChar(')'); builder.endChildren(); } protected void parseRestrictions() throws SyntaxError { for (;;) { parseRestriction(); if (! nextCharIs(',')) break; expectChar(','); } } protected void parseRestriction() throws SyntaxError { if (nextCharIs(']')) return; expectChar(null); StringBuilder comparator = new StringBuilder(); comparator.append(expectCompratorChar()); if (! nextCharIs(null)) { comparator.append(expectCompratorChar()); } expectChar(null); builder.addRestriction(comparator.toString(), getStringAndMove().trim(), getStringAndMove()); } protected char expectCompratorChar() throws SyntaxError { Character ch = getCurrentCharAndMove(); if ( ch == null || FSTokenizer.isSpecialChar(ch) != FSTokenizer.SpecialChar.EVEN_STRING_COMPARATOR) throw new SyntaxError(String.format("Comparator expected but '%c' found!", ch)); return ch; } protected boolean nextCharIs(Character next) { if (next == getCurrentChar()) return true; //mainly if both are null if (next == null) return false; //because of previous return next.equals(getCurrentChar()); } protected void expectChar(Character expected) throws SyntaxError { Character ch = getCurrentCharAndMove(); if (expected == ch) return; //mainly if both are null - return ok; if (expected == null || !expected.equals(ch)) throw new SyntaxError(String.format("Character '%c' expected but '%c' found!", expected, ch)); } protected Character getCurrentCharAndMove() { Character ch = getCurrentChar(); charIndex++; return ch; } protected Character findNextChar() { Character ch; do { ch = chars.get(++charIndex); if (ch == null) return ch; } while (ch == ' '); return ch; } protected Character getCurrentChar() { if (charIndex >= chars.size()) return null; Character ch = chars.get(charIndex); if (ch == null) return ch; if (ch == ' ') ch = findNextChar(); return ch; } protected boolean moreCharsAvailable() { for (int i = charIndex+1; i<chars.size(); i++) { Character ch = chars.get(i); if (ch == null) return true; if (ch != ' ') return true; } return false; } protected String getStringAndMove() { return strings.get(stringIndex++); } }
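Below is a hedged usage sketch for FSQueryParser (not part of the original sources). It assumes that '=' and '<' are comparator characters recognized by FSTokenizer and that a concrete FSQueryBuilder instance is obtained elsewhere in the project.

package czsem.fs.query;

// Hypothetical example, not part of the original project sources.
public class FSQueryParserUsageExample {

    // Parses a query of the form [restrictions](children) using a caller-supplied builder;
    // how the concrete FSQueryBuilder is created is project-specific and not shown here.
    public static void parseQuery(FSQueryBuilder builder) throws FSQueryParser.SyntaxError {
        FSQueryParser parser = new FSQueryParser(builder);
        // Each node is "[attr <comparator> value, ...]" optionally followed by "(child, child)".
        // This assumes '=' and '<' are comparator characters recognized by FSTokenizer.
        parser.parse("[cat=Clause]([lemma=dog, ord<5], [lemma=cat])");
    }
}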
package net.straininfo2.grs.idloader.db; import net.straininfo2.grs.idloader.bioproject.domain.AdminBioProject; import net.straininfo2.grs.idloader.bioproject.domain.BioProject; import net.straininfo2.grs.idloader.bioproject.domain.SubmissionBioProject; import net.straininfo2.grs.idloader.bioproject.xmlparsing.DomainHandler; import org.hibernate.SessionFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; public class BioProjectLoader implements DomainHandler { // TODO: ADD CLEANUP OF ORGANISM ONE-TO-ONEs (elsewhere) /* OrganismMorphology OrganismEnvironment OrganismPhenotype OrganismSample */ @Autowired SessionFactory sessionFactory; private int count = 0; private final static Logger logger = LoggerFactory.getLogger(BioProjectLoader.class); public BioProjectLoader() { } public SessionFactory getSessionFactory() { return sessionFactory; } public void setSessionFactory(SessionFactory sessionFactory) { this.sessionFactory = sessionFactory; } private void updateCount() { if (count++ % 30 == 0) { sessionFactory.getCurrentSession().flush(); } } @Override public void processBioProject(BioProject project) { logger.info("Saving project with ID {}", project.getProjectId()); sessionFactory.getCurrentSession().merge(project); updateCount(); } @Override public void processAdminBioProject(AdminBioProject project) { logger.info("Saving project with ID {}", project.getProjectId()); sessionFactory.getCurrentSession().merge(project); updateCount(); } @Override public void processSubmissionBioProject(SubmissionBioProject project) { logger.info("Saving project with ID {}", project.getProjectId()); sessionFactory.getCurrentSession().merge(project); updateCount(); } }
package org.openqa.selenium; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.openqa.selenium.testing.Ignore.Driver.ALL; import static org.openqa.selenium.testing.Ignore.Driver.ANDROID; import static org.openqa.selenium.testing.Ignore.Driver.CHROME; import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT; import static org.openqa.selenium.testing.Ignore.Driver.IE; import static org.openqa.selenium.testing.Ignore.Driver.IPHONE; import static org.openqa.selenium.testing.Ignore.Driver.SELENESE; import static org.openqa.selenium.testing.Ignore.Driver.OPERA; import org.openqa.selenium.interactions.MoveTargetOutOfBoundsException; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JavascriptEnabled; @Ignore(value = {ANDROID, HTMLUNIT}, reason = "Android: Race condition when click returns, " + "the UI did not finish scrolling..\nHtmlUnit: Scrolling requires rendering") public class ClickScrollingTest extends AbstractDriverTestCase { @JavascriptEnabled public void testClickingOnAnchorScrollsPage() { String scrollScript = ""; scrollScript += "var pageY;"; scrollScript += "if (typeof(window.pageYOffset) == 'number') {"; scrollScript += " pageY = window.pageYOffset;"; scrollScript += "} else {"; scrollScript += " pageY = document.documentElement.scrollTop;"; scrollScript += "}"; scrollScript += "return pageY;"; driver.get(pages.macbethPage); driver.findElement(By.partialLinkText("last speech")).click(); long yOffset = (Long) ((JavascriptExecutor) driver) .executeScript(scrollScript); // Focusing on to click, but not actually following, // the link will scroll it in to view, which is a few pixels further than 0 assertThat("Did not scroll", yOffset, is(greaterThan(300L))); } public void testShouldScrollToClickOnAnElementHiddenByOverflow() { String url = appServer.whereIs("click_out_of_bounds_overflow.html"); driver.get(url); WebElement link = driver.findElement(By.id("link")); try { link.click(); } catch (MoveTargetOutOfBoundsException e) { fail("Should not be out of bounds: " + e.getMessage()); } } public void testShouldBeAbleToClickOnAnElementHiddenByOverflow() { driver.get(appServer.whereIs("scroll.html")); WebElement link = driver.findElement(By.id("line8")); // This used to throw a MoveTargetOutOfBoundsException - we don't expect it to link.click(); assertEquals("line8", driver.findElement(By.id("clicked")).getText()); } @Ignore({SELENESE, OPERA}) public void testShouldNotScrollOverflowElementsWhichAreVisible() { driver.get(appServer.whereIs("scroll2.html")); WebElement list = driver.findElement(By.tagName("ul")); WebElement item = list.findElement(By.id("desired")); item.click(); long yOffset = (Long)((JavascriptExecutor)driver).executeScript("return arguments[0].scrollTop;", list); assertEquals("Should not have scrolled", 0, yOffset); } @Ignore({CHROME, IPHONE}) public void testShouldNotScrollIfAlreadyScrolledAndElementIsInView() { driver.get(appServer.whereIs("scroll3.html")); driver.findElement(By.id("button1")).click(); long scrollTop = getScrollTop(); driver.findElement(By.id("button2")).click(); assertEquals(scrollTop, getScrollTop()); } public void testShouldBeAbleToClickRadioButtonScrolledIntoView() { driver.get(appServer.whereIs("scroll4.html")); driver.findElement(By.id("radio")).click(); // If we don't throw, we're good } @Ignore(value = {IE}, reason = "IE has special overflow handling") public void 
testShouldScrollOverflowElementsIfClickPointIsOutOfViewButElementIsInView() { driver.get(appServer.whereIs("scroll5.html")); driver.findElement(By.id("inner")).click(); assertEquals("clicked", driver.findElement(By.id("clicked")).getText()); } private long getScrollTop() { return (Long)((JavascriptExecutor)driver).executeScript("return document.body.scrollTop;"); } }
package org.dspace.app.itemimport; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.StringTokenizer; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.traversal.NodeIterator; import org.xml.sax.SAXException; import org.apache.xpath.XPathAPI; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.BitstreamFormat; import org.dspace.content.Collection; import org.dspace.content.FormatIdentifier; import org.dspace.content.InstallItem; import org.dspace.content.WorkspaceItem; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.handle.HandleManager; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowManager; /* issues javadocs - even though it's not an API allow re-importing list of collections to choose from would be nice too */ /** * The Item importer does exactly that - imports items into * the repository. */ public class ItemImport { public static void main(String argv[]) throws Exception { String usage = "Itemimport has three modes of operation:\n" + " add = import contents of source_dir, and create mapfile\n" + " replace = use mapfile from previously imported items and replace\n" + " remove = use mapfile from previously imported items and delete them\n" + "\n" + "ItemImport add EPersonID collectionID source_dir mapfile\n" + "ItemImport replace EPersonID collectionID source_dir mapfile\n" + "ItemImport remove mapfile\n"; Context c = null; ItemImport myloader = new ItemImport(); int collectionID = -1; int epersonID = -1; String sourceDir = null; String mapFile = null; Collection mycollection = null; if( argv.length < 3 ) { System.out.println( usage ); System.exit( 1 ); } // now get the args if( argv[0].equals( "remove" ) && (argv.length == 3) ) { // collectionID = Integer.parseInt( argv[1] ); mapFile = argv[2]; } else if( argv[0].equals( "add" ) && (argv.length == 5) ) { epersonID = Integer.parseInt( argv[1] ); collectionID = Integer.parseInt( argv[2] ); sourceDir = argv[3]; mapFile = argv[4]; } else if( argv[0].equals( "replace" ) && (argv.length == 5) ) { epersonID = Integer.parseInt( argv[1] ); collectionID = Integer.parseInt( argv[2] ); sourceDir = argv[3]; mapFile = argv[4]; } else { System.out.println( usage ); System.exit( 1 ); } try { c = new Context(); if( epersonID != -1 ) { EPerson ep = EPerson.find(c, epersonID); c.setCurrentUser( ep ); } c.setIgnoreAuthorization( true ); if( argv[0].equals( "add" ) ) { mycollection = Collection.find( c, collectionID ); myloader.addItems( c, mycollection, sourceDir, mapFile ); } else if( argv[0].equals( "replace" ) ) { mycollection = Collection.find( c, collectionID ); myloader.replaceItems( c, mycollection, sourceDir, mapFile ); } else if( argv[0].equals( "remove" ) ) { myloader.removeItems( c, mycollection, mapFile ); } // complete all 
transactions c.complete(); } catch( Exception e ) { // abort all operations c.abort(); e.printStackTrace(); System.out.println( e ); } } private void addItems( Context c, Collection mycollection, String sourceDir, String mapFile ) throws Exception { System.out.println( "Adding items from directory: " + sourceDir ); System.out.println( "Generating mapfile: " + mapFile ); // create the mapfile File outFile = new File( mapFile ); PrintWriter mapOut = new PrintWriter( new FileWriter( outFile ) ); // now process the source directory String [] dircontents = new java.io.File( sourceDir ).list(); for( int i = 0; i < dircontents.length; i++ ) { addItem( c, mycollection, sourceDir, dircontents[ i ], mapOut ); System.out.println( i + " " + dircontents[ i ] ); } mapOut.close(); } private void replaceItems( Context c, Collection mycollection, String sourceDir, String mapFile ) throws Exception { // read in HashMap first, to get list of handles & source dirs HashMap myhash = readMapFile( mapFile ); // for each handle, re-import the item, discard the new handle // and re-assign the old handle Iterator i = myhash.keySet().iterator(); ArrayList itemsToDelete = new ArrayList(); while( i.hasNext() ) { // get the old handle String newItemName = (String)i.next(); String oldHandle = (String)myhash.get(newItemName); System.out.println("Replacing: " + oldHandle); // add new item, locate old one Item oldItem = (Item)HandleManager.resolveToObject(c, oldHandle); Item newItem = addItem(c, mycollection, sourceDir, newItemName, null); String newHandle = HandleManager.findHandle(c, newItem); // discard the new handle - FIXME: database hack String myquery = "DELETE FROM handle WHERE resource_type_id=" + Constants.ITEM + " AND resource_id=" + newItem.getID(); DatabaseManager.updateQuery(c, myquery ); // re-assign the old handle one to the new item myquery = "UPDATE handle set resource_id=" + newItem.getID() + " WHERE handle.handle LIKE '" + oldHandle + "'"; DatabaseManager.updateQuery(c, myquery ); // schedule item for demolition itemsToDelete.add( oldItem ); } // now run through again, deleting items (do this last to avoid disasters!) // (this way deletes only happen if there have been no errors previously) i = itemsToDelete.iterator(); while( i.hasNext() ) { removeItem(c, (Item)i.next()); } } private void removeItems( Context c, Collection mycollection, String mapFile ) throws Exception { System.out.println( "Deleting items listed in mapfile: " + mapFile ); // read in the mapfile HashMap myhash = readMapFile( mapFile ); // now delete everything that appeared in the mapFile Iterator i = myhash.keySet().iterator(); while( i.hasNext() ) { String myhandle = (String)myhash.get( i.next() ); System.out.println("Deleting item " + myhandle); removeItem(c, myhandle); } } // item? 
try and add it to the archive private Item addItem( Context c, Collection mycollection, String path, String itemname, PrintWriter mapOut ) throws Exception { Item myitem = null; // create workspace item WorkspaceItem wi = WorkspaceItem.create(c, mycollection, false); myitem = wi.getItem(); // now fill out dublin core for item loadDublinCore( c, myitem, path + "/" + itemname + "/" + "dublin_core.xml" ); // and the bitstreams from the contents file // process contents file, add bistreams and bundles processContentsFile( c, myitem, path + "/" + itemname, "contents" ); // put item in system InstallItem.installItem(c, wi); // now output line to the mapfile String myhandle = HandleManager.findHandle(c, myitem); if(mapOut!=null) { mapOut.println( itemname + " " + myhandle ); } return myitem; } // remove, given the actual item private void removeItem( Context c, Item myitem ) throws Exception { Collection[] collections = myitem.getCollections(); // Remove item from all the collections it's in for (int i = 0; i < collections.length; i++) { collections[i].removeItem(myitem); } } // remove, given a handle private void removeItem( Context c, String myhandle ) throws Exception { // bit of a hack - to remove an item, you must remove it // from all collections it's a part of, then it will be removed Item myitem = (Item)HandleManager.resolveToObject(c, myhandle); removeItem( c, myitem ); } // utility methods // read in the map file and generate a hashmap of (file,handle) pairs private HashMap readMapFile( String filename ) throws Exception { HashMap myhash = new HashMap(); BufferedReader is = new BufferedReader( new FileReader( filename ) ); String line; while( ( line = is.readLine() ) != null ) { String myfile; String myhandle; // a line should be archive filename<whitespace>handle StringTokenizer st = new StringTokenizer( line ); if( st.hasMoreTokens() ) { myfile = st.nextToken(); } else throw new Exception("Bad mapfile line:\n" + line ); if( st.hasMoreTokens() ) { myhandle = st.nextToken(); } else throw new Exception("Bad mapfile line:\n" + line ); myhash.put( myfile, myhandle ); } is.close(); return myhash; } private void loadDublinCore(Context c, Item myitem, String filename) throws SQLException, IOException, ParserConfigurationException, SAXException, TransformerException //, AuthorizeException { Document document = loadXML(filename); // Get the nodes corresponding to formats NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue"); System.out.println("Nodelist has # elements: " + dcNodes.getLength() ); // Add each one as a new format to the registry for (int i=0; i < dcNodes.getLength(); i++) { Node n = dcNodes.item(i); addDCValue(myitem, n); } } private void addDCValue(Item i, Node n) throws TransformerException { String value = getStringValue(n); //n.getNodeValue(); //getElementData(n, "element"); String element = getAttributeValue(n, "element"); String qualifier = getAttributeValue(n, "qualifier"); //NodeValue(); //getElementData(n, "qualifier"); System.out.println("Element: " + element + " Qualifier: " + qualifier + " Value: " + value ); if( qualifier.equals("none") ) qualifier = null; i.addDC(element, qualifier, "en", value); } /** * Return the String value of a Node */ public String getStringValue(Node node) { String value = node.getNodeValue(); if (node.hasChildNodes()) { Node first = node.getFirstChild(); if (first.getNodeType() == Node.TEXT_NODE) { return first.getNodeValue(); } } return value; } /** * Given a contents file and an item, stuffing it with bitstreams from 
the * contents file */ private void processContentsFile( Context c, Item i, String path, String filename ) { String contentspath = path + "/" + filename; String line = ""; System.out.println( "Processing contents file: " + contentspath ); try { BufferedReader is = new BufferedReader( new FileReader( contentspath ) ); while( ( line = is.readLine() ) != null ) { System.out.println( "Bitstream: " + line ); processContentFileEntry( c, i, path, line ); } is.close(); } catch( Exception e ) { e.printStackTrace(); System.out.println( "Caught exception: " + e ); } } // each entry represents a bitstream.... public void processContentFileEntry( Context c, Item i, String path, String name) throws SQLException, IOException, AuthorizeException { String fullpath = path + "/" + name; // get an input stream BufferedInputStream bis = new BufferedInputStream( new FileInputStream( fullpath ) ); // add it to the item in a bundle Bitstream bs = i.createSingleBitstream(bis); bs.setName( name ); // Identify the format BitstreamFormat bf = FormatIdentifier.guessFormat(c, bs); bs.setFormat(bf); bs.update(); } // XML utility methods public String getAttributeValue(Node n, String myattributename) { String myvalue = ""; NamedNodeMap nm = n.getAttributes(); for (int i = 0; i < nm.getLength(); i++ ) { Node node = nm.item(i); String name = node.getNodeName(); String value = node.getNodeValue(); if(myattributename.equals(name)) { return value; } } return myvalue; } // XML utility methods stolen from administer. /** * Get the CDATA of a particular element. For example, if the XML document * contains: * <P> * <code> * &lt;foo&gt;&lt;mimetype&gt;application/pdf&lt;/mimetype&gt;&lt;/foo&gt; * </code> * passing this the <code>foo</code> node and <code>mimetype</code> will * return <code>application/pdf</code>.</P> * Why this isn't a core part of the XML API I do not know... * * @param parentElement the element, whose child element you want * the CDATA from * @param childName the name of the element you want the CDATA from * * @return the CDATA as a <code>String</code> */ private String getElementData(Node parentElement, String childName) throws TransformerException { // Grab the child node Node childNode = XPathAPI.selectSingleNode(parentElement, childName); if (childNode == null) { // No child node, so no values return null; } // Get the #text Node dataNode = childNode.getFirstChild(); if (dataNode==null) { return null; } // Get the data String value = dataNode.getNodeValue().trim(); return value; } /** * Load in the XML from file. * * @param filename the filename to load from * * @return the DOM representation of the XML file */ private static Document loadXML(String filename) throws IOException, ParserConfigurationException, SAXException { DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); return builder.parse(new File(filename)); } }
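ItemImport is a command-line tool, so the sketch below (not part of the original sources) simply illustrates the argument layout documented in its usage string; the EPerson id, collection id, paths and handle shown are made-up examples.

package org.dspace.app.itemimport;

// Hypothetical example, not part of the original sources: all ids, paths and handles are
// made-up illustrations of the argument layout described in ItemImport's usage string.
public class ItemImportUsageExample {

    public static void main(String[] args) throws Exception {
        // "add" mode: import every sub-directory of items_dir into collection 2 as EPerson 1,
        // writing one "item_directory<whitespace>handle" mapfile line per imported item
        // (e.g. "item_000 123456789/42").
        ItemImport.main(new String[] {"add", "1", "2", "items_dir", "mapfile.txt"});
    }
}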
package next.operator.currency.model; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; import lombok.ToString; import javax.validation.ValidationException; import java.math.BigDecimal; import java.math.RoundingMode; import java.time.LocalDateTime; @Getter @Setter @NoArgsConstructor @ToString public class CurrencyExrateModel { private String exFrom; private String exTo; private LocalDateTime time; private BigDecimal exrate; public CurrencyExrateModel reverse() { final CurrencyExrateModel reversion = new CurrencyExrateModel(); reversion.setExFrom(this.exTo); reversion.setExTo(this.exFrom); reversion.setTime(this.time); reversion.setExrate(BigDecimal.ONE.divide(this.exrate, 6, RoundingMode.HALF_UP)); return reversion; } public CurrencyExrateModel merge(CurrencyExrateModel next) { final CurrencyExrateModel merged = new CurrencyExrateModel(); if (!this.exTo.equals(next.exFrom)) { throw new ValidationException("[left:" + this + ", right:" + next + "]"); } else { merged.exFrom = this.exFrom; merged.exTo = next.exTo; merged.time = this.time.compareTo(next.time) > 0 ? next.time : this.time; merged.exrate = this.exrate.multiply(next.exrate); } return merged; } }
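A small self-contained example of how reverse() and merge() compose exchange rates (not part of the original sources; the currency codes and rates are made up).

package next.operator.currency.model;

import java.math.BigDecimal;
import java.time.LocalDateTime;

// Hypothetical example, not part of the original sources.
public class CurrencyExrateModelExample {

    public static void main(String[] args) {
        CurrencyExrateModel usdToEur = new CurrencyExrateModel();
        usdToEur.setExFrom("USD");
        usdToEur.setExTo("EUR");
        usdToEur.setTime(LocalDateTime.now());
        usdToEur.setExrate(new BigDecimal("0.92"));

        CurrencyExrateModel eurToJpy = new CurrencyExrateModel();
        eurToJpy.setExFrom("EUR");
        eurToJpy.setExTo("JPY");
        eurToJpy.setTime(LocalDateTime.now());
        eurToJpy.setExrate(new BigDecimal("160.0"));

        // merge() requires this.exTo to equal next.exFrom; the resulting rate is the product
        // of the two rates and the timestamp is the older of the two.
        CurrencyExrateModel usdToJpy = usdToEur.merge(eurToJpy);
        System.out.println(usdToJpy); // USD -> JPY at 0.92 * 160.0 = 147.2

        // reverse() flips the direction and uses 1/exrate rounded to 6 decimal places.
        System.out.println(usdToJpy.reverse());
    }
}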
package net.bull.javamelody; import java.io.File; import java.util.HashSet; import javax.servlet.FilterConfig; import javax.servlet.ServletContext; import org.jrobin.core.RrdBackendFactory; import org.jrobin.core.RrdException; import org.quartz.SchedulerException; import org.quartz.impl.StdSchedulerFactory; /** * Classe utilitaire pour les tests unitaires. * @author Emeric Vernat */ final class Utils { private static final String SYSTEM_ACTIONS_PROPERTY_NAME = Parameters.PARAMETER_SYSTEM_PREFIX + Parameter.SYSTEM_ACTIONS_ENABLED.getCode(); private Utils() { super(); } static void setProperty(Parameter parameter, String value) { setProperty(Parameters.PARAMETER_SYSTEM_PREFIX + parameter.getCode(), value); } static void setProperty(String string, String value) { if (value == null) { System.getProperties().remove(string); } else { System.setProperty(string, value); } } static void initialize() { for (final Object systemProperty : new HashSet<Object>(System.getProperties().keySet())) { if (systemProperty.toString().startsWith(Parameters.PARAMETER_SYSTEM_PREFIX)) { System.getProperties().remove(systemProperty.toString()); } } JRobin.stop(); try { StdSchedulerFactory.getDefaultScheduler().shutdown(); } catch (final SchedulerException e) { throw new IllegalStateException(e); } Parameters.initialize((FilterConfig) null); Parameters.initialize((ServletContext) null); Parameters.initJdbcDriverParameters(null, null); // pour avoir les informations sur les connections, l'initialisation de la classe JdbcWrapper System.setProperty(SYSTEM_ACTIONS_PROPERTY_NAME, "true"); JdbcWrapper.USED_CONNECTION_INFORMATIONS.clear(); System.getProperties().remove(SYSTEM_ACTIONS_PROPERTY_NAME); new File(System.getProperty("java.io.tmpdir")).mkdirs(); try { // we must initialize default factory before creating any rrd if (!RrdBackendFactory.getDefaultFactory().getFactoryName() .equals(RrdNioBackendFactory.FACTORY_NAME)) { RrdBackendFactory.registerAndSetAsDefaultFactory(new RrdNioBackendFactory()); } } catch (final RrdException e) { throw new RuntimeException(e); } } }
package no.stelar7.api.r4j.basic.calling; import com.google.gson.*; import com.google.gson.reflect.TypeToken; import no.stelar7.api.r4j.basic.constants.api.*; import no.stelar7.api.r4j.basic.constants.types.RealmSpesificEnum; import no.stelar7.api.r4j.basic.exceptions.*; import no.stelar7.api.r4j.basic.ratelimiting.*; import no.stelar7.api.r4j.basic.utils.*; import org.slf4j.*; import javax.net.ssl.*; import java.io.*; import java.net.*; import java.nio.charset.StandardCharsets; import java.nio.file.*; import java.security.*; import java.security.cert.X509Certificate; import java.time.*; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; import java.util.function.BiFunction; import java.util.prefs.BackingStoreException; @SuppressWarnings("rawtypes") public class DataCallBuilder { private static final Logger logger = LoggerFactory.getLogger(DataCallBuilder.class); private final DataCall dc = new DataCall(); private static final BiFunction<String, String, String> MERGE = (o, n) -> o + "," + n; private static final BiFunction<String, String, String> MERGE_AS_SET = (o, n) -> o + n; private String requestMethod = "GET"; private String postData = ""; private static TrustManager[] trustAllCerts = new TrustManager[]{new X509TrustManager() { public java.security.cert.X509Certificate[] getAcceptedIssuers() { return null; } public void checkClientTrusted(X509Certificate[] certs, String authType) { } public void checkServerTrusted(X509Certificate[] certs, String authType) { } } }; // Create all-trusting host name verifier private static HostnameVerifier allHostsValid = (hostname, session) -> true; static { try { // Install the all-trusting trust manager SSLContext sc = SSLContext.getInstance("SSL"); sc.init(null, trustAllCerts, new java.security.SecureRandom()); HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory()); } catch (KeyManagementException | NoSuchAlgorithmException e) { e.printStackTrace(); } } private static void updateRatelimiter(Enum server, Enum endpoint) { RateLimiter limiter = DataCall.getLimiter().get(server).get(endpoint); limiter.updatePermitsTakenPerX(DataCall.getCallData().get(server).get(endpoint)); } private static Map<URLEndpoint, AtomicLong> requestCount = new HashMap<>(); /** * Puts together all the data, and then returns an object representing the JSON from the call * * @param retrys the amount of retries already done (should not be passed in!) * @return an object generated from the requested JSON */ public Object build(int... 
retrys) { final String url = this.getURL(); logger.info("Trying url: {}", url); if (this.dc.useRatelimiter()) { if (DataCall.getCredentials() == null) { throw new APIUnsupportedActionException("No API Creds set!"); } dc.getUrlHeaders().putIfAbsent("X-Riot-Token", DataCall.getCredentials().getLoLAPIKey()); // app limit applyLimit(this.dc.getPlatform(), this.dc.getPlatform()); // method limit applyLimit(this.dc.getPlatform(), this.dc.getEndpoint()); } final DataCallResponse response = this.getResponse(url); logger.debug(response.toString()); switch (response.getResponseCode()) { case 200: case 201: case 204: { String returnValue = response.getResponseData(); if (false) { try { Path output = Paths.get("D:\\requests\\" + this.dc.getEndpoint().name() + "\\" + requestCount.computeIfAbsent(this.dc.getEndpoint(), (k) -> new AtomicLong(0)).getAndIncrement() + ".json"); Files.createDirectories(output.getParent()); Files.write(output, returnValue.getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { e.printStackTrace(); } } if (this.dc.getEndpoint() != null) { final Class<?> returnType = this.dc.getEndpoint().getType(); returnValue = postProcess(returnValue); return Utils.getGson().fromJson(returnValue, returnType); } else { return returnValue; } } case 400: { String reasonText = "Your api request is malformed!\n"; reasonText += url + "\n"; throw new APIResponseException(APIHTTPErrorReason.ERROR_400, reasonText + response.getResponseData()); } case 401: { String reasonText = "The API denied your request!\n"; reasonText += "Your API key was not present in the request\n"; reasonText += "Make sure you have setup your APICredentials before doing a API call!\n"; throw new APIResponseException(APIHTTPErrorReason.ERROR_401, reasonText + response.getResponseData()); } case 403: { String reasonText = "The API denied your request!\n"; reasonText += "Your API key might be invalid\n"; reasonText += "You may be trying to call a endpoint you dont have access to\n"; reasonText += "or if you just regenerated it; wait a few seconds, then try again\n"; throw new APIResponseException(APIHTTPErrorReason.ERROR_403, reasonText + response.getResponseData()); } case 404: { return new Pair<>(response.getResponseCode(), response.getResponseData()); } case 405: { String reasonText = "The API was unable to handle your request due to the wrong HTTP method being used.\n"; throw new APIResponseException(APIHTTPErrorReason.ERROR_403, reasonText + response.getResponseData()); } case 429: if (response.getResponseData().startsWith(RateLimitType.LIMIT_UNDERLYING.getReason()) || response.getResponseData().startsWith(RateLimitType.LIMIT_SERVICE.getReason())) { return sleepAndRetry(retrys, response.getResponseCode()); } else { String error = response.getResponseData() + "429 ratelimit hit! " + "Please do not restart your application to refresh the timer! 
" + "This isn't supposed to happen unless you restarted your app before the last limit was hit!"; logger.error(error); } return this.build(); case 500: case 502: case 503: case 504: { return sleepAndRetry(retrys, response.getResponseCode()); } case 599: { throw new APIResponseException(APIHTTPErrorReason.ERROR_599, response.getResponseData()); } default: { break; } } System.err.println("UNHANDLED RESPONSE CODE!!!"); System.err.println("Response Code:" + response.getResponseCode()); System.err.println("Response Data:" + response.getResponseData()); throw new APINoValidResponseException(response.getResponseData()); } private String postProcess(String returnValue) { final List<URLEndpoint> ddragon = Arrays.asList(URLEndpoint.DDRAGON_CHAMPION_MANY, URLEndpoint.DDRAGON_SUMMONER_SPELLS); if (ddragon.contains(this.dc.getEndpoint())) { returnValue = postProcessDDragonMany(returnValue); } if (this.dc.getEndpoint() == URLEndpoint.DDRAGON_ITEMS || this.dc.getEndpoint() == URLEndpoint.DDRAGON_RUNES) { returnValue = postProcessDDragonAddId(returnValue); } if (this.dc.getEndpoint() == URLEndpoint.V4_MATCH) { returnValue = postProcessMatch(returnValue); } final List<URLEndpoint> summonerEndpoints = Arrays.asList(URLEndpoint.V4_SUMMONER_BY_ACCOUNT, URLEndpoint.V4_SUMMONER_BY_ID, URLEndpoint.V4_SUMMONER_BY_NAME, URLEndpoint.V4_SUMMONER_BY_PUUID); if (summonerEndpoints.contains(this.dc.getEndpoint())) { returnValue = postProcessSummoner(returnValue); } final List<URLEndpoint> apexEndpoints = Arrays.asList(URLEndpoint.V4_LEAGUE_MASTER, URLEndpoint.V4_LEAGUE_GRANDMASTER, URLEndpoint.V4_LEAGUE_CHALLENGER, URLEndpoint.V1_TFT_LEAGUE_MASTER, URLEndpoint.V1_TFT_LEAGUE_GRANDMASTER, URLEndpoint.V1_TFT_LEAGUE_CHALLENGER); if (apexEndpoints.contains(this.dc.getEndpoint())) { returnValue = postProcessApex(returnValue); } return returnValue; } private String postProcessApex(String returnValue) { JsonObject elem = (JsonObject) JsonParser.parseString(returnValue); JsonArray entries = elem.getAsJsonArray("entries"); entries.forEach(e -> { JsonObject ob = (JsonObject) e; ob.add("leagueId", elem.get("leagueId")); ob.add("queueType", elem.get("queue")); ob.add("tier", elem.get("tier")); }); return Utils.getGson().toJson(elem); } private String postProcessDDragonMany(String returnValue) { JsonObject elem = (JsonObject) JsonParser.parseString(returnValue); JsonObject parent = elem.getAsJsonObject("data"); for (String key : new HashSet<>(parent.keySet())) { JsonObject child = parent.getAsJsonObject(key); String id = child.get("key").getAsString(); child.addProperty("key", key); child.addProperty("id", id); parent.add(id, child); parent.remove(key); } return Utils.getGson().toJson(elem); } private String postProcessDDragonAddId(String returnValue) { JsonObject elem = (JsonObject) JsonParser.parseString(returnValue); JsonObject parent = elem.getAsJsonObject("data"); for (String key : new HashSet<>(parent.keySet())) { JsonObject child = parent.getAsJsonObject(key); child.addProperty("id", key); } return Utils.getGson().toJson(elem); } private String postProcessPerkPath(String returnValue) { JsonObject elem = (JsonObject) JsonParser.parseString(returnValue); String pathName = elem.get("name").getAsString(); String pathId = elem.get("id").getAsString(); JsonArray slots = elem.getAsJsonArray("slots"); for (JsonElement slot : slots) { JsonArray runes = slot.getAsJsonObject().getAsJsonArray("runes"); for (JsonElement rune : runes) { JsonObject obj = (JsonObject) rune; obj.addProperty("runePathName", pathName); 
obj.addProperty("runePathId", pathId); } } return Utils.getGson().toJson(elem); } private String postProcessPerkPaths(String returnValue) { JsonArray element = (JsonArray) JsonParser.parseString(returnValue); for (JsonElement elem : element) { String pathName = elem.getAsJsonObject().get("name").getAsString(); String pathId = elem.getAsJsonObject().get("id").getAsString(); JsonArray slots = elem.getAsJsonObject().getAsJsonArray("slots"); for (JsonElement slot : slots) { JsonArray runes = slot.getAsJsonObject().getAsJsonArray("runes"); for (JsonElement rune : runes) { JsonObject obj = (JsonObject) rune; obj.addProperty("runePathName", pathName); obj.addProperty("runePathId", pathId); } } } return Utils.getGson().toJson(element); } private String postProcessSummoner(String returnValue) { JsonObject element = (JsonObject) JsonParser.parseString(returnValue); element.addProperty("platform", this.dc.getPlatform().toString()); return Utils.getGson().toJson(element); } private String postProcessMatch(String returnValue) { JsonObject element = (JsonObject) JsonParser.parseString(returnValue); JsonArray participantIds = element.getAsJsonArray("participantIdentities"); for (JsonElement participant : participantIds) { JsonObject pid = participant.getAsJsonObject(); JsonObject player = participant.getAsJsonObject().getAsJsonObject("player"); if (player != null) { for (String key : player.keySet()) { pid.add(key, player.get(key)); } pid.remove("player"); } } JsonArray participants = element.getAsJsonArray("participants"); for (JsonElement participant : participants) { JsonObject stats = participant.getAsJsonObject().getAsJsonObject("stats"); JsonObject part = participant.getAsJsonObject(); if (!stats.has("perkPrimaryStyle")) { return Utils.getGson().toJson(element); } JsonObject mPerk = new JsonObject(); JsonArray array = new JsonArray(); for (int i = 0; i < 6; i++) { JsonObject perk = new JsonObject(); perk.add("perkId", stats.get("perk" + i)); perk.add("perkVar1", stats.get("perk" + i + "Var1")); perk.add("perkVar2", stats.get("perk" + i + "Var2")); perk.add("perkVar3", stats.get("perk" + i + "Var3")); array.add(perk); stats.remove("perk" + i); stats.remove("perk" + i + "Var1"); stats.remove("perk" + i + "Var2"); stats.remove("perk" + i + "Var3"); } mPerk.add("perks", array); mPerk.add("perkPrimaryStyle", stats.get("perkPrimaryStyle")); mPerk.add("perkSubStyle", stats.get("perkSubStyle")); mPerk.add("statPerk0", stats.get("statPerk0")); mPerk.add("statPerk1", stats.get("statPerk1")); mPerk.add("statPerk2", stats.get("statPerk2")); stats.remove("perkPrimaryStyle"); stats.remove("perkSubStyle"); part.add("perks", mPerk); } return Utils.getGson().toJson(element); } private Object sleepAndRetry(int[] retrys, int errorCode) { try { int attempts = (retrys != null && retrys.length == 1) ? ++retrys[0] : 1; long nextSleepDuration = attempts * 500; long totalSleepDuration = 0; for (int i = 1; i < attempts; i++) { totalSleepDuration += 500 * i; } String message = ""; if (errorCode == 429) { message = "Ratelimit reached too many times, waiting " + nextSleepDuration / 1000 + " seconds then retrying"; } else { message = "Server error (" + errorCode + ") , waiting " + nextSleepDuration / 1000 + " seconds then retrying"; } logger.info(message); if (totalSleepDuration > this.dc.getMaxSleep()) { throw new APINoValidResponseException(String.format("API did not return a valid response in time. 
Total sleep time is over the max sleep value %s > %s...\n" + "Try setting `DataCall.setDefaultMaxSleep(long)` to a larger number (default is 10000)", (nextSleepDuration + totalSleepDuration), this.dc.getMaxSleep())); } Thread.sleep(nextSleepDuration); return this.build(attempts); } catch (InterruptedException e) { throw new APINoValidResponseException("Something interupted the API timeout;" + e.getMessage()); } } public static final ReentrantLock lock = new ReentrantLock(); private void applyLimit(Enum platform, Enum endpoint) { lock.lock(); try { Map<Enum, RateLimiter> child = DataCall.getLimiter().getOrDefault(platform, new HashMap<>()); if (child.get(endpoint) == null) { loadLimiterFromCache(platform, endpoint, child); } } finally { lock.unlock(); } RateLimiter limitr = DataCall.getLimiter().getOrDefault(platform, new HashMap<>()).get(endpoint); if (limitr != null) { limitr.acquire(); storeLimiter(platform, endpoint); } } private void storeLimiter(Enum platform, Enum endpoint) { RateLimiter limiter = DataCall.getLimiter().get(platform).get(endpoint); String baseKey = platform.toString() + "/" + endpoint.toString(); String limitKey = "limits/" + baseKey; String firstKey = "first/" + baseKey; String callKey = "call/" + baseKey; DataCall.getRatelimiterCache().put(firstKey, Utils.getGson().toJson(limiter.getFirstCallInTime())); DataCall.getRatelimiterCache().put(callKey, Utils.getGson().toJson(limiter.getCallCountInTime())); } private void loadLimiterFromCache(Enum platform, Enum endpoint, Map<Enum, RateLimiter> child) { String baseKey = platform.toString() + "/" + endpoint.toString(); String limitKey = "limits/" + baseKey; String firstKey = "first/" + baseKey; String callKey = "call/" + baseKey; String lastLimit = DataCall.getRatelimiterCache().get(limitKey, null); String lastFirst = DataCall.getRatelimiterCache().get(firstKey, null); String lastKey = DataCall.getRatelimiterCache().get(callKey, null); if (lastLimit == null) { logger.debug("No instance of an old ratelimiter found"); return; } else { logger.debug("Loading old ratelimiter data"); } try { List<RateLimit> knownLimits = Utils.getGson().fromJson(lastLimit, new TypeToken<List<RateLimit>>() {}.getType()); Map<RateLimit, AtomicLong> knownTime = Utils.getGson().fromJson(lastFirst, new TypeToken<Map<RateLimit, AtomicLong>>() {}.getType()); Map<RateLimit, AtomicLong> knownCount = Utils.getGson().fromJson(lastKey, new TypeToken<Map<RateLimit, AtomicLong>>() {}.getType()); RateLimiter newerLimit = new BurstRateLimiter(knownLimits); newerLimit.setCallCountInTime(knownCount); newerLimit.setFirstCallInTime(knownTime); logger.debug("Loaded ratelimit for {}", endpoint); child.put(endpoint, newerLimit); DataCall.getLimiter().put(platform, child); } catch (JsonSyntaxException s) { try { logger.debug("Old ratelimiter was of incompatible type, re-creating"); DataCall.getRatelimiterCache().clear(); DataCall.getRatelimiterCache().sync(); } catch (BackingStoreException e) { e.printStackTrace(); } } } /** * Opens a connection to the URL, then reads the data into a Response. 
* * @param url the URL to call * @return a DataCallResponse with the data from the call * @throws APINoValidResponseException if the datacall failed in any fashion */ private DataCallResponse getResponse(final String url) { final StringBuilder data = new StringBuilder(); try { final HttpURLConnection con = (HttpURLConnection) new URL(url).openConnection(); con.setUseCaches(false); con.setDefaultUseCaches(false); con.setRequestProperty("User-Agent", "R4J"); con.setRequestProperty("Accept-Charset", "ISO-8859-1,utf-8"); con.setRequestProperty("Accept-Language", "en-US"); con.setRequestProperty("Cache-Control", "no-store,max-age=0,no-cache"); con.setRequestProperty("Expires", "0"); con.setRequestProperty("Pragma", "no-cache"); con.setRequestProperty("Connection", "keep-alive"); con.setRequestProperty("Content-Type", "application/json"); this.dc.getUrlHeaders().forEach(con::setRequestProperty); con.setRequestMethod(requestMethod); StringBuilder sb = new StringBuilder(); con.getRequestProperties().forEach((key, value) -> sb.append(String.format(Constants.TABBED2X_VERBOSE_STRING_FORMAT, key, value)).append("\n")); String printMe = new StringBuilder("\n") .append(String.format(Constants.TABBED_VERBOSE_STRING_FORMAT, "Url", url)).append("\n") .append(String.format(Constants.TABBED_VERBOSE_STRING_FORMAT, "Request Method", con.getRequestMethod())).append("\n") .append(String.format(Constants.TABBED_VERBOSE_STRING_FORMAT, "POST data", this.postData)).append("\n") .append(String.format(Constants.TABBED_VERBOSE_STRING_FORMAT, "Request Headers", "")).append("\n") .append(sb).toString(); logger.debug(printMe); if (!this.postData.isEmpty()) { con.setDoOutput(true); final DataOutputStream writer = new DataOutputStream(con.getOutputStream()); writer.writeBytes(this.postData); writer.flush(); writer.close(); } con.connect(); StringBuilder sb2 = new StringBuilder("\n"); con.getHeaderFields().forEach((key, value) -> sb2.append(String.format(Constants.TABBED2X_VERBOSE_STRING_FORMAT, key, value)).append("\n")); String printMe2 = new StringBuilder("\n").append(String.format(Constants.TABBED_VERBOSE_STRING_FORMAT, "Response Headers", "")) .append(sb2) .toString(); logger.debug(printMe2); String appA = con.getHeaderField("X-App-Rate-Limit"); String appB = con.getHeaderField("X-App-Rate-Limit-Count"); String methodA = con.getHeaderField("X-Method-Rate-Limit"); String methodB = con.getHeaderField("X-Method-Rate-Limit-Count"); if (appA == null) { logger.debug("Header 'X-App-Rate-Limit' missing from call: {} ", getURL()); } else { createRatelimiterIfMissing(appA, dc.getPlatform(), dc.getPlatform()); saveHeaderRateLimit(appB, dc.getPlatform(), dc.getPlatform()); } if (methodA == null) { logger.debug("Header 'X-Method-Rate-Limit' missing from call: {}", getURL()); } else { createRatelimiterIfMissing(methodA, dc.getPlatform(), dc.getEndpoint()); saveHeaderRateLimit(methodB, dc.getPlatform(), dc.getEndpoint()); } String deprecationHeader = con.getHeaderField("X-Riot-Deprecated"); if (deprecationHeader != null) { LocalDateTime timeout = LocalDateTime.ofEpochSecond(Long.parseLong(deprecationHeader) / 1000, 0, ZoneOffset.ofHours(-7)); logger.info("You are using a deprecated method, this method will stop working at: {}", timeout.toString()); } if (con.getResponseCode() == 429) { final RateLimitType limitType = RateLimitType.getBestMatch(con.getHeaderField("X-Rate-Limit-Type")); StringBuilder valueList = new StringBuilder(); DataCall.getLimiter().get(dc.getPlatform()).forEach((key, value) -> { valueList.append(key); 
valueList.append("="); valueList.append(value.getCallCountInTime()); valueList.append("\n"); }); String reasonString = String.format("%s%n%s", limitType.getReason(), valueList.toString().trim()); String reason = String.format("%s%n", reasonString); if (limitType == RateLimitType.LIMIT_METHOD) { RateLimiter limter = DataCall.getLimiter().get(this.dc.getPlatform()).get(this.dc.getEndpoint()); limter.updateSleep(con.getHeaderField("Retry-After")); limter.resetCalls(); } if (limitType == RateLimitType.LIMIT_USER) { RateLimiter limter = DataCall.getLimiter().get(this.dc.getPlatform()).get(this.dc.getPlatform()); limter.updateSleep(con.getHeaderField("Retry-After")); limter.resetCalls(); } return new DataCallResponse(con.getResponseCode(), reason); } InputStream stream = (con.getResponseCode() <= 399) ? con.getInputStream() : con.getErrorStream(); if (stream == null) { return new DataCallResponse(con.getResponseCode(), "Unable to read stream!"); } try (BufferedReader br = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { br.lines().forEach(data::append); } con.disconnect(); return new DataCallResponse(con.getResponseCode(), data.toString()); } catch (final IOException e) { throw new APIResponseException(APIHTTPErrorReason.ERROR_599, APIHTTPErrorReason.ERROR_599.getReason()); } } private void createRatelimiterIfMissing(String methodA, Enum platform, Enum endpoint) { Map<Enum, RateLimiter> child = DataCall.getLimiter().getOrDefault(platform, new HashMap<>()); RateLimiter oldLimit = child.get(endpoint); RateLimiter newerLimit = createLimiter(methodA); if (!newerLimit.equals(oldLimit)) { newerLimit.mergeFrom(oldLimit); child.put(endpoint, newerLimit); logger.debug("Updating Ratelimit For {}", endpoint); logger.debug(newerLimit.getLimits().toString()); } DataCall.getLimiter().put(platform, child); } private synchronized void saveHeaderRateLimit(String limitCount, Enum platform, Enum endpoint) { Map<Enum, Map<Integer, Integer>> parent = DataCall.getCallData().getOrDefault(platform, new HashMap<>()); Map<Integer, Integer> child = parent.getOrDefault(endpoint, new HashMap<>()); child.putAll(parseLimitFromHeader(limitCount)); parent.put(endpoint, child); DataCall.getCallData().put(platform, parent); updateRatelimiter(platform, endpoint); storeLimiter(platform, endpoint); } private Map<Integer, Integer> parseLimitFromHeader(String headerValue) { final String[] limits = headerValue.split(","); Map<Integer, Integer> timeout = new HashMap<>(); for (final String limitPair : limits) { final String[] limit = limitPair.split(":"); final Integer call = Integer.parseInt(limit[0]); final Integer time = Integer.parseInt(limit[1]); timeout.put(time, call); } return timeout; } public RateLimiter createLimiter(String limitCount) { Map<Integer, Integer> timeout = parseLimitFromHeader(limitCount); List<RateLimit> limits = new ArrayList<>(); for (Entry<Integer, Integer> entry : timeout.entrySet()) { limits.add(new RateLimit(entry.getValue(), entry.getKey(), TimeUnit.SECONDS)); } return new BurstRateLimiter(limits); } /** * Generates the URL to use for the call. * * @return the URL to use for the call. 
*/ private String getURL() { String[] url = {dc.getProxy()}; if (dc.getEndpoint() != null) { url[0] = url[0].replace(Constants.GAME_PLACEHOLDER, dc.getEndpoint().getGame()); url[0] = url[0].replace(Constants.SERVICE_PLACEHOLDER, dc.getEndpoint().getService()); url[0] = url[0].replace(Constants.VERSION_PLACEHOLDER, dc.getEndpoint().getVersion()); url[0] = url[0].replace(Constants.RESOURCE_PLACEHOLDER, dc.getEndpoint().getResource()); } if (dc.getPlatform() != null) { url[0] = url[0].replace(Constants.PLATFORM_PLACEHOLDER, dc.getPlatform().toString().toLowerCase()); url[0] = url[0].replace(Constants.REGION_PLACEHOLDER, ((RealmSpesificEnum) dc.getPlatform()).getRealmValue()); } dc.getUrlParams().forEach((k, v) -> url[0] = url[0].replace(k, v)); boolean first = true; for (Entry<String, String> pair : dc.getUrlData().entrySet()) { char token = first ? '?' : '&'; if (first) { first = !first; } url[0] = url[0] + token + pair.getKey() + '=' + pair.getValue(); } return url[0]; } /** * Sets the endpoint to make the call to * * @param endpoint the endpoint to make the call to * @return this */ public DataCallBuilder withEndpoint(final URLEndpoint endpoint) { this.dc.setEndpoint(endpoint); return this; } /** * enables ratelimiters for this call * * @param flag enabled or disabled * @return this */ public DataCallBuilder withLimiters(final boolean flag) { this.dc.setUseLimiters(flag); return this; } /** * Sets the headers to use with the call * * @param key the header key * @param value the header value * @return this */ public DataCallBuilder withHeader(final String key, final String value) { this.dc.getUrlHeaders().merge(key, value, MERGE); return this; } /** * Sets the data to send with the request if its a POST call * * @param data the data to send * @return this */ public DataCallBuilder withPostData(final String data) { this.postData = data; return this; } /** * The request-method on the call (usually GET or POST) * * @param method the request method * @return this */ public DataCallBuilder withRequestMethod(final String method) { this.requestMethod = method; return this; } /** * Set the platform to make this call to. (ie. EUW1) * * @param server the server to make the call to * @return this */ public DataCallBuilder withPlatform(final Enum server) { this.dc.setPlatform(server); return this; } /** * Replaces placeholders in the URL (ie. {region}) * * @param key The key to replace (ie. {region}) * @param value The data to replace it with (ie. EUW) * @return this */ public DataCallBuilder withURLDataAsSet(final String key, final String value) { this.dc.getUrlData().merge(key, (this.dc.getUrlData().get(key) != null) ? ("&" + key + "=" + value) : value, MERGE_AS_SET); return this; } /** * Adds parameters to the url (ie. ?api_key) * * @param key the parameter to add (ie. api_key) * @param value the value to add after the parameter (ie. 6fa459ea-ee8a-3ca4-894e-db77e160355e) * @return this */ public DataCallBuilder withQueryParameter(final String key, final String value) { this.dc.getUrlData().merge(key, value, MERGE); return this; } /** * Replaces placeholders in the URL (ie. {region}) * * @param key The key to replace (ie. {region}) * @param value The data to replace it with (ie. EUW) * @return this */ public DataCallBuilder withURLParameter(final String key, final String value) { this.dc.getUrlParams().merge(key, value, MERGE); return this; } public DataCallBuilder withProxy(String proxy) { this.dc.setProxy(proxy); return this; } }
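A hedged sketch of the fluent call surface of DataCallBuilder (not part of the original sources). The URL placeholder key "{summonerName}" is an assumption; the real placeholder names and the platform enum constants live elsewhere in the library, and API credentials must already be registered on DataCall before build() is called.

package no.stelar7.api.r4j.basic.calling;

import no.stelar7.api.r4j.basic.constants.api.URLEndpoint;

// Hypothetical sketch, not part of the original sources.
@SuppressWarnings("rawtypes")
public class DataCallBuilderUsageExample {

    public static Object fetchSummonerByName(Enum platform, String name) {
        return new DataCallBuilder()
                .withEndpoint(URLEndpoint.V4_SUMMONER_BY_NAME) // constant referenced in postProcess above
                .withPlatform(platform)                        // e.g. an EUW1 shard constant from the library
                .withURLParameter("{summonerName}", name)      // placeholder key is an assumption
                .withLimiters(true)
                .build();
    }
}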
package com.itmill.toolkit.demo.featurebrowser; import com.itmill.toolkit.ui.Button; import com.itmill.toolkit.ui.CustomComponent; import com.itmill.toolkit.ui.Label; import com.itmill.toolkit.ui.OrderedLayout; import com.itmill.toolkit.ui.RichTextArea; import com.itmill.toolkit.ui.Button.ClickEvent; /** * @author marc * */ public class RichTextExample extends CustomComponent { public static final String txt = "<h1>RichText editor example</h1>" + "To edit this text, press the <b>Edit</b> button below." + "<br/>" + "See the <A href=\"http:\">" /* the href URL was truncated in the original source and is not recoverable */ + "for more information."; private OrderedLayout main; private Label l; private RichTextArea editor; private Button b; public RichTextExample() { main = new OrderedLayout(); main.setMargin(true); setCompositionRoot(main); l = new Label(txt); l.setContentMode(Label.CONTENT_XHTML); main.addComponent(l); editor = new RichTextArea(); b = new Button("Edit", new Button.ClickListener() { public void buttonClick(ClickEvent event) { if (main.getComponentIterator().next() == l) { editor.setValue(l.getValue()); main.replaceComponent(l, editor); b.setCaption("Save"); } else { l.setValue(editor.getValue()); main.replaceComponent(editor, l); b.setCaption("Edit"); } } }); main.addComponent(b); main.setComponentAlignment(b, OrderedLayout.ALIGNMENT_RIGHT, OrderedLayout.ALIGNMENT_VERTICAL_CENTER); } }
package org.adridadou.ethereum.propeller.rpc; import org.adridadou.ethereum.propeller.Crypto; import org.adridadou.ethereum.propeller.EthereumBackend; import org.adridadou.ethereum.propeller.event.BlockInfo; import org.adridadou.ethereum.propeller.event.EthereumEventHandler; import org.adridadou.ethereum.propeller.values.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.web3j.crypto.Credentials; import org.web3j.crypto.RawTransaction; import org.web3j.crypto.TransactionEncoder; import org.web3j.protocol.core.methods.response.EthBlock; import org.web3j.protocol.core.methods.response.Log; import org.web3j.protocol.core.methods.response.Transaction; import org.web3j.utils.Numeric; import java.util.*; import java.util.stream.Collectors; public class EthereumRpc implements EthereumBackend { private static final Logger logger = LoggerFactory.getLogger(EthereumRpc.class); private final Web3JFacade web3JFacade; private final EthereumRpcEventGenerator ethereumRpcEventGenerator; private final ChainId chainId; public EthereumRpc(Web3JFacade web3JFacade, ChainId chainId, EthereumRpcConfig config) { this.web3JFacade = web3JFacade; this.ethereumRpcEventGenerator = new EthereumRpcEventGenerator(web3JFacade, config, this); this.chainId = chainId; } @Override public GasPrice getGasPrice() { return web3JFacade.getGasPrice(); } @Override public EthValue getBalance(EthAddress address) { return EthValue.wei(web3JFacade.getBalance(address).getBalance()); } @Override public boolean addressExists(EthAddress address) { return web3JFacade.getTransactionCount(address).intValue() > 0 || web3JFacade.getBalance(address).getBalance().intValue() > 0 || !web3JFacade.getCode(address).isEmpty(); } @Override public EthHash submit(TransactionRequest request, Nonce nonce) { RawTransaction tx = web3JFacade.createTransaction(nonce, getGasPrice(), request.getGasLimit(), request.getAddress(), request.getValue(), request.getData()); EthData signedMessage = EthData.of(TransactionEncoder.signMessage(tx, (byte) chainId.id, Credentials.create(Numeric.toHexStringNoPrefix(request.getAccount().getBigIntPrivateKey())))); web3JFacade.sendTransaction(signedMessage); return EthHash.of(Crypto.sha3(signedMessage).data); } @Override public GasUsage estimateGas(EthAccount account, EthAddress address, EthValue value, EthData data) { return new GasUsage(web3JFacade.estimateGas(account, address, value, data)); } @Override public Nonce getNonce(EthAddress currentAddress) { return new Nonce(web3JFacade.getTransactionCount(currentAddress)); } @Override public long getCurrentBlockNumber() { return web3JFacade.getCurrentBlockNumber(); } @Override public Optional<BlockInfo> getBlock(long number) { return web3JFacade.getBlock(number).map(this::toBlockInfo); } @Override public Optional<BlockInfo> getBlock(EthHash ethHash) { return web3JFacade.getBlock(ethHash).map(this::toBlockInfo); } @Override public SmartContractByteCode getCode(EthAddress address) { return web3JFacade.getCode(address); } @Override public EthData constantCall(EthAccount account, EthAddress address, EthValue value, EthData data) { return web3JFacade.constantCall(account, address, data); } @Override public void register(EthereumEventHandler eventHandler) { ethereumRpcEventGenerator.addListener(eventHandler); } @Override public Optional<TransactionInfo> getTransactionInfo(EthHash hash) { return Optional.ofNullable(web3JFacade.getReceipt(hash)).flatMap(web3jReceipt -> Optional.ofNullable(web3JFacade.getTransaction(hash)) .map(transaction -> { TransactionReceipt receipt = 
toReceipt(transaction, web3jReceipt); TransactionStatus status = transaction.getBlockHash().isEmpty() ? TransactionStatus.Unknown : TransactionStatus.Executed; return new TransactionInfo(hash, receipt, status, EthHash.of(transaction.getBlockHash())); }) ); } BlockInfo toBlockInfo(EthBlock ethBlock) { return Optional.ofNullable(ethBlock.getBlock()).map(block -> { try { Map<String, EthBlock.TransactionObject> txObjects = block.getTransactions().stream() .map(tx -> (EthBlock.TransactionObject) tx.get()).collect(Collectors.toMap(EthBlock.TransactionObject::getHash, e -> e)); Map<String, org.web3j.protocol.core.methods.response.TransactionReceipt> receipts = txObjects.values().stream() .map(tx -> Optional.ofNullable(web3JFacade.getReceipt(EthHash.of(tx.getHash())))) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toMap(org.web3j.protocol.core.methods.response.TransactionReceipt::getTransactionHash, e -> e)); List<TransactionReceipt> receiptList = receipts.entrySet().stream() .map(entry -> toReceipt(txObjects.get(entry.getKey()), entry.getValue())).collect(Collectors.toList()); return new BlockInfo(block.getNumber().longValue(), receiptList); } catch (Throwable ex) { logger.error("error while converting to block info", ex); return new BlockInfo(block.getNumber().longValue(), Collections.emptyList()); } }).orElseGet(() -> new BlockInfo(-1, new ArrayList<>())); } private TransactionReceipt toReceipt(Transaction tx, org.web3j.protocol.core.methods.response.TransactionReceipt receipt) { boolean successful = !receipt.getGasUsed().equals(tx.getGas()); String error = ""; if (!successful) { error = "All the gas was used! an error occurred"; } return new TransactionReceipt(EthHash.of(receipt.getTransactionHash()), EthHash.of(receipt.getBlockHash()), EthAddress.of(receipt.getFrom()), EthAddress.of(receipt.getTo()), EthAddress.of(receipt.getContractAddress()), EthData.of(tx.getInput()), error, EthData.empty(), successful, toEventInfos(EthHash.of(receipt.getTransactionHash()), receipt.getLogs()), EthValue.wei(tx.getValue())); } private List<EventData> toEventInfos(EthHash transactionHash, List<Log> logs) { return logs.stream().map(log -> this.toEventInfo(transactionHash, log)).collect(Collectors.toList()); } private EventData toEventInfo(EthHash transactionHash, Log log) { List<EthData> topics = log.getTopics().stream().map(EthData::of).collect(Collectors.toList()); if(topics.size() > 0) { EthData eventSignature = topics.get(0); EthData eventArguments = EthData.of(log.getData()); return new EventData(transactionHash, eventSignature, eventArguments, topics.subList(1, topics.size())); } else { return new EventData(transactionHash, EthData.empty(), EthData.empty(), new ArrayList<>()); } } }
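// The toReceipt(...) conversion above infers success without a receipt status field: a transaction is
// treated as failed when it consumed exactly the gas it was given ("All the gas was used!").
// Below is a minimal, self-contained sketch of that heuristic using only the JDK; the class name
// GasHeuristicSketch and the sample gas figures are illustrative, not part of the propeller API.
import java.math.BigInteger;

public class GasHeuristicSketch {

    /** Mirrors the heuristic in EthereumRpc.toReceipt: success iff gasUsed != gasLimit. */
    static boolean isLikelySuccessful(BigInteger gasUsed, BigInteger gasLimit) {
        return !gasUsed.equals(gasLimit);
    }

    public static void main(String[] args) {
        BigInteger limit = BigInteger.valueOf(90_000);
        // Used less than the limit: the conversion reports success and an empty error string.
        System.out.println(isLikelySuccessful(BigInteger.valueOf(51_234), limit)); // true
        // Used every unit of gas: the conversion reports failure.
        System.out.println(isLikelySuccessful(limit, limit));                      // false
    }
}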
package org.agmip.translators.dssat; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.agmip.common.Functions; import org.agmip.core.types.TranslatorInput; import static org.agmip.translators.dssat.DssatCommonInput.*; import static org.agmip.util.MapUtil.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Meng Zhang */ public class DssatControllerInput implements TranslatorInput { private final DssatXFileInput mgnReader = new DssatXFileInput(); private final DssatSoilInput soilReader = new DssatSoilInput(); private final DssatWeatherInput wthReader = new DssatWeatherInput(); private final DssatAFileInput obvAReader = new DssatAFileInput(); private final DssatTFileInput obvTReader = new DssatTFileInput(); private final DssatCulFileInput culReader = new DssatCulFileInput(); private static final Logger LOG = LoggerFactory.getLogger(DssatControllerInput.class); /** * All DSSAT Data input method, used for single file or zip package * * @param arg0 The path of input experiment file * @return result data holder object */ @Override public HashMap readFile(String arg0) { HashMap brMap; try { brMap = getBufferReader(arg0); } catch (FileNotFoundException fe) { LOG.warn("File not found under following path : [" + arg0 + "]!"); return new HashMap(); } catch (IOException e) { LOG.error(Functions.getStackTrace(e)); return new HashMap(); } return read(brMap); } /** * All DSSAT Data input method, used for uncompressed multiple files * * @param files The list of model input files for translation * @return result data holder object */ public HashMap readFile(List<File> files) { HashMap brMap; try { brMap = getBufferReader(files); } catch (IOException e) { LOG.error(Functions.getStackTrace(e)); return new HashMap(); } return read(brMap); } /** * All DSSAT Data input method, specially used for DSSAT files generated by CRAFT * * @param arg0 The path of CRAFT working folder * @return result data holder object */ public HashMap readFileFromCRAFT(String arg0) { HashMap brMap; try { File dir = new File(arg0); // Data frin CRAFT with DSSAT format if (dir.isDirectory()) { List<File> files = new ArrayList(); for (File f : dir.listFiles()) { String name = f.getName().toUpperCase(); // XFile folder if (name.equals("FILEX")) { for (File exp : f.listFiles()) { if (exp.isFile()) { String expName = exp.getName().toUpperCase(); if (expName.matches(".+\\.\\w{2}X")) { files.add(exp); } } } } // Weather folder else if (name.equals("WEATHER")) { for (File wth : f.listFiles()) { if (wth.isFile()) { String wthName = wth.getName().toUpperCase(); if (wthName.endsWith(".WTH")) { files.add(wth); } } } } // Soil file else if (f.isFile() && name.endsWith(".SOL")) { files.add(f); } } brMap = getBufferReader(files); } else { LOG.error("You need to provide the CRAFT working folder used for generating DSSAT files."); return new HashMap(); } } catch (IOException e) { LOG.error(Functions.getStackTrace(e)); return new HashMap(); } return read(brMap); } private HashMap read(HashMap brMap) { HashMap ret = new HashMap(); HashMap metaData = new HashMap(); ArrayList<HashMap> expArr = new ArrayList<HashMap>(); HashMap expData; ArrayList<HashMap> mgnArr; ArrayList<HashMap> soilArr; HashMap soilData; HashMap soilTmpMap = new HashMap(); HashMap<String, String> soilAnalysisMap = new HashMap(); String soilId; ArrayList<HashMap> wthArr; HashMap wthData; HashMap wthTmpMap = new HashMap(); String wthId; HashMap 
obvAFiles; HashMap obvAFile; ArrayList<HashMap> obvAArr; HashMap obvTFiles; HashMap obvTFile; ArrayList<HashMap> obvTArr; ArrayList<HashMap> culArr; HashMap culData; try { // Set Data source and version info setDataVersionInfo(metaData); // Try to read XFile (treatment; management) mgnArr = mgnReader.readTreatments(brMap, metaData); // Try to read soil File soilArr = soilReader.readSoilSites(brMap, metaData); // Try to read weather File wthArr = wthReader.readDailyData(brMap, metaData); // Try to read Observed AFile (summary data) obvAFiles = obvAReader.readObvData(brMap); // Try to read Observed AFile (time-series data) obvTFiles = obvTReader.readObvData(brMap); // Try to read cultivar File culArr = culReader.readCultivarData(brMap, metaData); } catch (FileNotFoundException fe) { LOG.warn(Functions.getStackTrace(fe)); return ret; } catch (IOException e) { LOG.error(Functions.getStackTrace(e)); return ret; } // Combine the each part of data for (int i = 0; i < mgnArr.size(); i++) { // Set meta data block for this treatment expData = mgnReader.setupMetaData(metaData, i); // Set soil data for this treatment wthId = getValueOr(expData, "wst_id", "0"); if (!wthId.equals("0")) { wthData = getSectionDataWithNocopy(wthArr, "wst_id", wthId); if (wthData != null && !wthData.isEmpty()) { // expData.put(wthReader.jsonKey, wthData); wthTmpMap.put(wthId, wthData); } else { wthId = getValueOr(expData, "dssat_wst_id", "0"); wthData = getSectionDataWithNocopy(wthArr, "wst_id", wthId); if (wthData != null && !wthData.isEmpty()) { wthTmpMap.put(wthId, wthData); expData.put("wst_id", wthId); } } } // Set weather data for this treatment soilId = getValueOr(expData, "soil_id", "0"); if (!soilId.equals("0") && !soilTmpMap.containsKey(soilId)) { soilData = getSectionDataWithNocopy(soilArr, "soil_id", soilId); // if there is soil analysis data, create new soil block by using soil analysis info if (expData.get("soil_analysis") != null) { HashMap saTmp = (HashMap) expData.remove("soil_analysis"); String saHash = saTmp.hashCode() + "_" + soilId; if (!soilAnalysisMap.containsKey(saHash)) { if (soilData == null) { soilData = new HashMap(); } else { soilData = CopyList(soilData); } // Update soil site data copyItem(soilData, saTmp, "sadat"); copyItem(soilData, saTmp, "smhb"); copyItem(soilData, saTmp, "smpx"); copyItem(soilData, saTmp, "smke"); soilId += "_" + (i + 1); soilData.put("soil_id", soilId); expData.put("soil_id", soilId); soilAnalysisMap.put(saHash, soilId); // Update soil layer data ArrayList<HashMap> soilLyrs = getObjectOr(soilData, soilReader.layerKey, new ArrayList()); ArrayList<HashMap> saLyrs = getObjectOr(saTmp, mgnReader.icEventKey, new ArrayList()); String[] copyKeys = {"sllb", "slbdm", "sloc", "slni", "slphw", "slphb", "slpx", "slke", "slsc"}; soilData.put(soilReader.layerKey, combinLayers(soilLyrs, saLyrs, "sllb", "sllb", copyKeys)); } else { expData.put("soil_id", soilAnalysisMap.get(saHash)); soilData = null; } } if (soilData != null && !soilData.isEmpty()) { // expData.put(soilReader.jsonKey, soilData); soilTmpMap.put(soilId, soilData); } } // Get exname String exname = (String) expData.remove("exname_o"); if (exname == null) { exname = ""; } // observed data (summary) obvAFile = getObjectOr(obvAFiles, exname, new HashMap()); obvAArr = getObjectOr(obvAFile, obvAReader.obvDataKey, new ArrayList<HashMap>()); HashMap obv = new HashMap(); expData.put(obvAReader.jsonKey, obv); if (!getValueOr(expData, "trno", "0").equals("0")) { HashMap tmp = getSectionDataWithNocopy(obvAArr, "trno_a", 
expData.get("trno").toString()); if (tmp != null) { obv.putAll(tmp); } } // observed data (time-series) obvTFile = getObjectOr(obvTFiles, exname, new HashMap()); obvTArr = getObjectOr(obvTFile, obvTReader.obvDataKey, new ArrayList<HashMap>()); if (!getValueOr(expData, "trno", "0").equals("0")) { HashMap tmp = getSectionDataWithNocopy(obvTArr, "trno_t", expData.get("trno").toString()); if (tmp != null) { obv.put("timeSeries", tmp.get(obvTReader.obvDataKey)); } } // there is no observed data, remove the key from experiment object if (obv.isEmpty()) { expData.remove(obvAReader.jsonKey); } // Set experiment data include management, Initial Condition and DSSAT specific data blocks for this treatment mgnReader.setupTrnData(expData, mgnArr.get(i), obvAFiles, obvTFiles); // Set dssat cultivar info block if (!culArr.isEmpty()) { HashMap mgnData = getObjectOr(expData, mgnReader.jsonKey, new HashMap()); ArrayList<HashMap> eventArr = getObjectOr(mgnData, "events", new ArrayList()); ArrayList<HashMap> culTmpArr = new ArrayList<HashMap>(); for (int j = 0; j < eventArr.size(); j++) { if (getObjectOr(eventArr.get(j), "event", "").equals("planting")) { culData = getSectionDataWithNocopy(culArr, "cul_id", (String) eventArr.get(j).get("cul_id")); if (culData != null) { culTmpArr.add(culData); } } } if (!culTmpArr.isEmpty()) { HashMap tmp = new HashMap(); tmp.put(culReader.dataKey, culTmpArr); expData.put(culReader.jsonKey, tmp); } } // Add to output array expArr.add(expData); } if (!expArr.isEmpty()) { ret.put("experiments", expArr); if (!soilTmpMap.isEmpty()) { ret.put("soils", new ArrayList(soilTmpMap.values())); } if (!wthTmpMap.isEmpty()) { ret.put("weathers", new ArrayList(wthTmpMap.values())); } } else { // If only weather data or soil data if (!soilArr.isEmpty()) { ret.put("soils", soilArr); } if (!wthArr.isEmpty()) { ret.put("weathers", wthArr); } } return ret; } }
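// In read() above, a treatment carrying soil-analysis data gets a derived soil id (baseId + "_" + treatmentNo),
// and that derived id is memoised under a key built from the analysis map's hashCode plus the base id, so
// identical analyses against the same soil reuse one derived soil block instead of duplicating it.
// Below is a minimal, self-contained sketch of that memoisation using only the JDK; SoilIdMemoSketch,
// deriveSoilId and the sample inputs are illustrative names, not AgMIP API.
import java.util.HashMap;
import java.util.Map;

public class SoilIdMemoSketch {

    private final Map<String, String> derivedIds = new HashMap<>();

    /** Returns an existing derived id for (analysis, baseId), or creates and remembers a new one. */
    String deriveSoilId(Map<String, String> soilAnalysis, String baseId, int treatmentNo) {
        String key = soilAnalysis.hashCode() + "_" + baseId;
        return derivedIds.computeIfAbsent(key, k -> baseId + "_" + treatmentNo);
    }

    public static void main(String[] args) {
        SoilIdMemoSketch memo = new SoilIdMemoSketch();
        Map<String, String> analysis = new HashMap<>();
        analysis.put("smhb", "SA001");
        System.out.println(memo.deriveSoilId(analysis, "IBSN910025", 1)); // IBSN910025_1
        // The same analysis for the same base soil reuses the first derived id.
        System.out.println(memo.deriveSoilId(analysis, "IBSN910025", 2)); // IBSN910025_1
    }
}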
package com.jcwhatever.arborianprotect.listeners; import com.jcwhatever.arborianprotect.IProtected; import com.jcwhatever.arborianprotect.filters.FilterPermission; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import org.bukkit.entity.EntityType; import org.bukkit.event.Cancellable; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.BlockFadeEvent; import org.bukkit.event.block.BlockGrowEvent; import org.bukkit.event.block.BlockIgniteEvent; import org.bukkit.event.block.BlockIgniteEvent.IgniteCause; import org.bukkit.event.block.BlockSpreadEvent; import org.bukkit.event.block.LeavesDecayEvent; import org.bukkit.event.entity.EntityChangeBlockEvent; import org.bukkit.event.entity.EntityExplodeEvent; import java.util.List; /** * Block event filter listener. */ public class BlockListener implements Listener { private static final Location LEAF_DECAY_LOCATION = new Location(null, 0, 0, 0); private static final Location BLOCK_FADE_LOCATION = new Location(null, 0, 0, 0); private static final Location BLOCK_SPREAD_LOCATION = new Location(null, 0, 0, 0); private static final Location BLOCK_IGNITE_LOCATION = new Location(null, 0, 0, 0); private static final Location EXPLOSION_LOCATION = new Location(null, 0, 0, 0); private static final Location ENTITY_CHANGE_BLOCK_LOCATION = new Location(null, 0, 0, 0); private static final EventProcessor<Cancellable> LEAF_DECAY = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getLeafDecay(); } }; private static final EventProcessor<Cancellable> ICE_MELT = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getIceMelt(); } }; private static final EventProcessor<Cancellable> SNOW_MELT = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getSnowMelt(); } }; private static final EventProcessor<Cancellable> SOIL_DEHYDRATE = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getSoilDehydrate(); } }; private static final EventProcessor<Cancellable> VINE_GROWTH = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getVineGrowth(); } }; private static final EventProcessor<Cancellable> MUSHROOM_GROWTH = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getMushroomGrowth(); } }; private static final EventProcessor<Cancellable> GRASS_GROWTH = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getGrassGrowth(); } }; private static final EventProcessor<Cancellable> MYCEL_GROWTH = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getMyceliumGrowth(); } }; private static final EventProcessor<Cancellable> FIRE_SPREAD = new EventProcessor<Cancellable>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getFireSpread(); } }; private static final EventProcessor<EntityExplodeEvent> 
TNT_DAMAGE = new EventProcessor<EntityExplodeEvent>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getTntDamage(); } @Override protected void setCancelled(EntityExplodeEvent event, boolean isCancelled) { if (isCancelled) event.blockList().clear(); } }; private static final EventProcessor<EntityExplodeEvent> CREEPER_DAMAGE = new EventProcessor<EntityExplodeEvent>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getCreeperDamage(); } @Override protected void setCancelled(EntityExplodeEvent event, boolean isCancelled) { if (isCancelled) event.blockList().clear(); } }; private static final EventProcessor<EntityExplodeEvent> FIREBALL_DAMAGE = new EventProcessor<EntityExplodeEvent>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getFireballDamage(); } @Override protected void setCancelled(EntityExplodeEvent event, boolean isCancelled) { if (isCancelled) event.blockList().clear(); } }; private static final EventProcessor<EntityChangeBlockEvent> MOB_CHANGE_BLOCK_EVENT = new EventProcessor<EntityChangeBlockEvent>() { @Override public FilterPermission getPermission(IProtected target) { return target.getBlockEventFilter().getMobChangeBlock(); } }; @EventHandler(priority = EventPriority.LOW) private void onLeafDecay(LeavesDecayEvent event) { Location location = event.getBlock().getLocation(LEAF_DECAY_LOCATION); LEAF_DECAY.processEvent(location, event); } @EventHandler(priority = EventPriority.LOW) private void onBlockFade(BlockFadeEvent event) { Location location = event.getBlock().getLocation(BLOCK_FADE_LOCATION); Material material = event.getBlock().getType(); if (material != Material.ICE && material != Material.SNOW && material != Material.SOIL) return; switch (material) { case ICE: ICE_MELT.processEvent(location, event); break; case SNOW: SNOW_MELT.processEvent(location, event); break; case SOIL: SOIL_DEHYDRATE.processEvent(location, event); break; } } @EventHandler(priority = EventPriority.LOW) private void onBlockSpread(BlockSpreadEvent event) { Location location = event.getBlock().getLocation(BLOCK_SPREAD_LOCATION); Material material = event.getBlock().getType(); switch (material) { case VINE: VINE_GROWTH.processEvent(location, event); break; case RED_MUSHROOM: // fall through case BROWN_MUSHROOM: MUSHROOM_GROWTH.processEvent(location, event); break; case GRASS: GRASS_GROWTH.processEvent(location, event); break; case MYCEL: MYCEL_GROWTH.processEvent(location, event); break; } } @EventHandler(priority = EventPriority.LOW) private void onBlockGrow(BlockGrowEvent event) { Location location = event.getBlock().getLocation(BLOCK_SPREAD_LOCATION); Material material = event.getBlock().getType(); switch (material) { case VINE: VINE_GROWTH.processEvent(location, event); break; case RED_MUSHROOM: // fall through case BROWN_MUSHROOM: MUSHROOM_GROWTH.processEvent(location, event); break; case GRASS: GRASS_GROWTH.processEvent(location, event); break; case MYCEL: MYCEL_GROWTH.processEvent(location, event); break; } } @EventHandler(priority = EventPriority.LOW) private void onBlockIgnite(BlockIgniteEvent event) { Location location = event.getBlock().getLocation(BLOCK_IGNITE_LOCATION); IgniteCause cause = event.getCause(); if (cause != IgniteCause.SPREAD) return; FIRE_SPREAD.processEvent(location, event); } @EventHandler(priority = EventPriority.LOW) private void onExplosion(EntityExplodeEvent event) { EntityType type = 
event.getEntityType(); if (type != EntityType.PRIMED_TNT && type != EntityType.CREEPER && type != EntityType.FIREBALL && type != EntityType.SMALL_FIREBALL) { return; } List<Block> blocks = event.blockList(); for (Block block : blocks) { Location location = block.getLocation(EXPLOSION_LOCATION); switch (type) { case PRIMED_TNT: if (TNT_DAMAGE.processEvent(location, event)) return; break; case CREEPER: if (CREEPER_DAMAGE.processEvent(location, event)) return; break; case FIREBALL: // fallthrough case SMALL_FIREBALL: if (FIREBALL_DAMAGE.processEvent(location, event)) return; break; } } } @EventHandler(priority = EventPriority.LOW) private void onMobChangeBlock(EntityChangeBlockEvent event) { if (!event.getEntity().getType().isAlive()) return; Location location = event.getBlock().getLocation(ENTITY_CHANGE_BLOCK_LOCATION); MOB_CHANGE_BLOCK_EVENT.processEvent(location, event); } }
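// The listener above keeps one pre-allocated Location per event type and calls
// block.getLocation(REUSED_LOCATION), relying on the Bukkit overload that fills the supplied instance
// instead of allocating a new Location on every event. The sketch below shows the same
// "fill a caller-supplied holder" pattern with plain JDK types; MutablePoint and BlockSketch are
// illustrative stand-ins, not Bukkit API.
public class LocationReuseSketch {

    static final class MutablePoint {
        double x, y, z;
    }

    static final class BlockSketch {
        final double x, y, z;
        BlockSketch(double x, double y, double z) { this.x = x; this.y = y; this.z = z; }

        /** Copies this block's coordinates into the holder and returns it (no allocation). */
        MutablePoint getLocation(MutablePoint reuse) {
            reuse.x = x; reuse.y = y; reuse.z = z;
            return reuse;
        }
    }

    // One shared holder per call site, mirroring LEAF_DECAY_LOCATION and friends above.
    // A single static holder is only viable because the events are handled on one thread.
    private static final MutablePoint EVENT_LOCATION = new MutablePoint();

    public static void main(String[] args) {
        BlockSketch block = new BlockSketch(10, 64, -3);
        MutablePoint p = block.getLocation(EVENT_LOCATION);
        System.out.println(p.x + "," + p.y + "," + p.z); // 10.0,64.0,-3.0
    }
}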
package org.amc.game.chess; import org.amc.game.chess.ChessBoard.ChessPieceLocation; import org.amc.game.chess.ChessBoard.Coordinate; import edu.emory.mathcs.backport.java.util.Collections; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; public class PlayersKingCheckmateCondition { private PlayerKingInCheckCondition kingIsChecked = new PlayerKingInCheckCondition(); private Player player; private Player opponent; private ChessBoard board; private ChessPieceLocation playersKingLocation; private List<ChessPieceLocation> enemyLocations; private List<ChessPieceLocation> attackingPieces; private List<ChessPieceLocation> playersPieces; public PlayersKingCheckmateCondition(Player player, Player opponent, ChessBoard board) { this.player = player; this.opponent = opponent; this.board = board; this.playersKingLocation = findThePlayersKing(); this.enemyLocations = board.getListOfPlayersPiecesOnTheBoard(opponent); this.attackingPieces = getAllPiecesAttackingTheKing(); this.playersPieces = board.getListOfPlayersPiecesOnTheBoard(player); } /** * Checks to see if the player's king is checkmated. * * @param player * Player whos king ChessPiece is checkmated * @param board * ChessBoard * @return Boolean true if checkmate has occurred */ boolean isCheckMate() { return isPlayersKingInCheck() && isKingNotAbleToMoveOutOfCheck() && canAttackingPieceNotBeCaptured() && canAttackingPieceNotBeBlocked(); } /** * Player's king has no safe squares to move to * * @return Boolean */ boolean isKingNotAbleToMoveOutOfCheck() { Set<Location> possibleSafeMoveLocations = findAllSafeMoveLocations(playersKingLocation); return possibleSafeMoveLocations.isEmpty(); } private ChessPieceLocation findThePlayersKing() { Location kingLocation = board.getPlayersKingLocation(player); return new ChessPieceLocation(board.getPieceFromBoardAt(kingLocation), kingLocation); } private Set<Location> findAllSafeMoveLocations(ChessPieceLocation kingsLocation) { Set<Location> possibleMoveLocations = getAllTheKingsPossibleMoveLocations(kingsLocation); board.removePieceOnBoardAt(kingsLocation.getLocation()); Set<Location> squaresUnderAttack = new HashSet<>(); for (Location location : possibleMoveLocations) { for (ChessPieceLocation enemyPiece : enemyLocations) { Move move = new Move(enemyPiece.getLocation(), location); ChessPiece piece = enemyPiece.getPiece(); if (piece.isValidMove(board, move)) { squaresUnderAttack.add(location); break; } } } possibleMoveLocations.removeAll(squaresUnderAttack); board.putPieceOnBoardAt(kingsLocation.getPiece(), kingsLocation.getLocation()); return possibleMoveLocations; } private Set<Location> getAllTheKingsPossibleMoveLocations(ChessPieceLocation kingsLocation) { return ((KingPiece) kingsLocation.getPiece()).getPossibleMoveLocations(board, kingsLocation.getLocation()); } /** * Checks to see if the Player can capture the attacking ChessPiece Only if * the capture doesn't lead to the King still being checked * * @param player * Player * @param board * ChessBoard * @return Boolean */ boolean canAttackingPieceNotBeCaptured() { if (isThereMoreThanOneAttacker()) { return true; } Location attackingPieceLocation = attackingPieces.get(0).getLocation(); for (ChessPieceLocation cpl : playersPieces) { ChessPiece piece = cpl.getPiece(); Move move = new Move(cpl.getLocation(), attackingPieceLocation); if (piece.isValidMove(board, move)) { if (willPlayerBeInCheck(move)) { continue; } else { return false; } } } return true; } private boolean willPlayerBeInCheck(Move move){ 
ReversibleMove checkMove = new ReversibleMove(board, move); checkMove.testMove(); boolean playersKingInCheck=isPlayersKingInCheck(); undoMove(checkMove); return playersKingInCheck; } private boolean isThereMoreThanOneAttacker() { return attackingPieces.size() != 1; } private void undoMove(ReversibleMove move) { try { move.undoMove(); } catch (InvalidMoveException ime) { throw new RuntimeException( "In canAttackingPieceBeCaptured method:Chessboard in inconsistent state"); } } /** * Checks to see if the attacking ChessPiece can be blocked * * @param player * @param board * @return Boolean true if the attacking ChessPiece can be blocked. */ boolean canAttackingPieceNotBeBlocked() { if (isThereMoreThanOneAttacker()) { return true; } Location attackingPieceLocation = attackingPieces.get(0).getLocation(); ChessPiece attacker = board.getPieceFromBoardAt(attackingPieceLocation); Move move = new Move(attackingPieceLocation, playersKingLocation.getLocation()); Set<Location> blockingSquares = getAllSquaresInAMove(attacker, move); for (Location blockingSquare : blockingSquares) { for (ChessPieceLocation cpl : playersPieces) { Move blockingMove = new Move(cpl.getLocation(), blockingSquare); ChessPiece piece = cpl.getPiece(); if (!(piece instanceof KingPiece) && piece.isValidMove(board, blockingMove)) { if (willPlayerBeInCheck(blockingMove)) { continue; } else { return false; } } } } return true; } boolean isPlayersKingInCheck() { return this.kingIsChecked.isPlayersKingInCheck(player, opponent, board); } /** * Returns a Set of Squares covered in a move Not including the start and * end squares * * @param move * Move * @return Set of Locations */ private Set<Location> getAllSquaresInAMove(ChessPiece piece, Move move) { Set<Location> squares = new HashSet<>(); if (piece.canSlide()) { int distance = Math.max(move.getAbsoluteDistanceX(), move.getAbsoluteDistanceY()); int positionX = move.getStart().getLetter().getIndex(); int positionY = move.getStart().getNumber(); for (int i = 0; i < distance - 1; i++) { positionX = positionX + 1 * (int) Math.signum(move.getDistanceX()); positionY = positionY + 1 * (int) Math.signum(move.getDistanceY()); squares.add(new Location(Coordinate.values()[positionX], positionY)); } } return squares; } /** * Find all opponents pieces directly attacking the king * * @param player * Player * @param opponent * Player * @param board * ChessBoard * @return List of ChessPieceLocation of attacking pieces */ List<ChessPieceLocation> getAllPiecesAttackingTheKing() { List<ChessPieceLocation> attackingPieces = new ArrayList<>(); for (ChessPieceLocation cpl : enemyLocations) { Move move = new Move(cpl.getLocation(), playersKingLocation.getLocation()); if (cpl.getPiece().isValidMove(board, move)) { attackingPieces.add(cpl); } } return attackingPieces; } }
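// willPlayerBeInCheck(...) above uses the "apply, test, undo" idiom: ReversibleMove.testMove() mutates
// the board, the check predicate is evaluated, and undoMove() restores the position. Below is a minimal,
// self-contained sketch of that idiom over a Map-based board; SpeculativeMoveSketch, the square names
// and the sample predicate are illustrative, not part of the chess engine.
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class SpeculativeMoveSketch {

    /** Applies from->to, evaluates the predicate, then restores both squares before returning. */
    static boolean testMove(Map<String, Character> board, String from, String to,
                            Predicate<Map<String, Character>> stillInCheck) {
        Character moved = board.remove(from);
        Character captured = board.put(to, moved);
        try {
            return stillInCheck.test(board);
        } finally {
            board.put(from, moved);      // undo: piece back to its origin
            if (captured != null) {
                board.put(to, captured); // undo: restore any captured piece
            } else {
                board.remove(to);
            }
        }
    }

    public static void main(String[] args) {
        Map<String, Character> board = new HashMap<>();
        board.put("e1", 'K');
        board.put("e8", 'r');
        board.put("e2", 'B');
        // Moving the bishop off the e-file would leave the king exposed to the rook on e8.
        boolean exposesKing = testMove(board, "e2", "d3",
                b -> b.get("e2") == null && b.containsKey("e8"));
        System.out.println(exposesKing);     // true
        System.out.println(board.get("e2")); // B  (the board is unchanged afterwards)
    }
}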
package com.vectrace.MercurialEclipse.history; import static com.vectrace.MercurialEclipse.preferences.MercurialPreferenceConstants.*; import java.lang.reflect.InvocationTargetException; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.text.BadLocationException; import org.eclipse.jface.text.Document; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.IRegion; import org.eclipse.jface.text.ITextOperationTarget; import org.eclipse.jface.text.TextViewer; import org.eclipse.jface.text.source.SourceViewer; import org.eclipse.jface.viewers.DoubleClickEvent; import org.eclipse.jface.viewers.IDoubleClickListener; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.custom.StyledText; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Menu; import org.eclipse.team.ui.TeamOperation; import org.eclipse.team.ui.history.IHistoryPageSite; import org.eclipse.ui.IActionBars; import org.eclipse.ui.IWorkbenchActionConstants; import org.eclipse.ui.actions.BaseSelectionListenerAction; import org.eclipse.ui.part.IPageSite; import org.eclipse.ui.texteditor.ITextEditorActionConstants; import com.vectrace.MercurialEclipse.MercurialEclipsePlugin; import com.vectrace.MercurialEclipse.commands.HgPatchClient; import com.vectrace.MercurialEclipse.exception.HgException; import com.vectrace.MercurialEclipse.model.FileStatus; import com.vectrace.MercurialEclipse.utils.ResourceUtils; import com.vectrace.MercurialEclipse.wizards.Messages; public class ChangedPathsPage { private static final String IMG_COMMENTS = "comments.gif"; //$NON-NLS-1$ private static final String IMG_DIFFS = "diffs.gif"; //$NON-NLS-1$ private static final String IMG_AFFECTED_PATHS_FLAT_MODE = "flatLayout.gif"; //$NON-NLS-1$ private SashForm mainSashForm; private SashForm innerSashForm; private boolean showComments; private boolean showAffectedPaths; private boolean showDiffs; private boolean wrapCommentsText; private ChangePathsTableProvider changePathsViewer; private TextViewer commentTextViewer; private TextViewer diffTextViewer; private final IPreferenceStore store = MercurialEclipsePlugin.getDefault() .getPreferenceStore(); private ToggleAffectedPathsOptionAction[] toggleAffectedPathsLayoutActions; private final MercurialHistoryPage page; public ChangedPathsPage(MercurialHistoryPage page, Composite parent) { this.page = page; init(parent); } private void init(Composite parent) { this.showComments = store.getBoolean(PREF_SHOW_COMMENTS); this.wrapCommentsText = store.getBoolean(PREF_WRAP_COMMENTS); this.showAffectedPaths = store.getBoolean(PREF_SHOW_PATHS); this.showDiffs = store.getBoolean(PREF_SHOW_DIFFS); this.mainSashForm = new SashForm(parent, SWT.VERTICAL); this.mainSashForm.setLayoutData(new 
GridData( GridData.FILL_BOTH)); this.toggleAffectedPathsLayoutActions = new ToggleAffectedPathsOptionAction[] { new ToggleAffectedPathsOptionAction(this, "HistoryView.affectedPathsHorizontalLayout", //$NON-NLS-1$ PREF_AFFECTED_PATHS_LAYOUT, LAYOUT_HORIZONTAL), new ToggleAffectedPathsOptionAction(this, "HistoryView.affectedPathsVerticalLayout", //$NON-NLS-1$ PREF_AFFECTED_PATHS_LAYOUT, LAYOUT_VERTICAL), }; } public void createControl() { createRevisionDetailViewers(); addSelectionListeners(); contributeActions(); } private void addSelectionListeners() { page.getTableViewer().addSelectionChangedListener( new ISelectionChangedListener() { private Object currentLogEntry; private int currentNumberOfSelectedElements; public void selectionChanged(SelectionChangedEvent event) { ISelection selection = event.getSelection(); Object logEntry = ((IStructuredSelection) selection).getFirstElement(); int nrOfSelectedElements = ((IStructuredSelection) selection).size(); if (logEntry != currentLogEntry || nrOfSelectedElements != currentNumberOfSelectedElements) { this.currentLogEntry = logEntry; this.currentNumberOfSelectedElements = nrOfSelectedElements; updatePanels(selection); } } }); changePathsViewer.addSelectionChangedListener(new ISelectionChangedListener() { private Object selectedChangePath; public void selectionChanged(SelectionChangedEvent event) { IStructuredSelection selection = (IStructuredSelection) event.getSelection(); FileStatus changePath = (FileStatus) selection.getFirstElement(); if (changePath != selectedChangePath) { selectedChangePath = changePath; selectInDiffViewerAndScroll(changePath); } } }); } private void selectInDiffViewerAndScroll(FileStatus selectedChangePath) { if(selectedChangePath == null) { return; } String pathAsString = selectedChangePath.getRootRelativePath().toString(); // Note: this is a plain text search for the path in the diff text // This could be refined with a regular expression matching the // whole diff line. int offset = diffTextViewer.getDocument().get().indexOf(pathAsString); if(offset != -1) { selectInDiffViewerAndScrollToPosition(offset, pathAsString.length()); } } private void selectInDiffViewerAndScrollToPosition(int offset, int length) { try { diffTextViewer.setSelectedRange(offset, length); int line = diffTextViewer.getDocument().getLineOfOffset(offset); diffTextViewer.setTopIndex(line); } catch (BadLocationException e) { MercurialEclipsePlugin.logError(e); } } /** * Creates the detail viewers (commentViewer, changePathsViewer and diffViewer) shown * below the table of revisions. Will rebuild these viewers after a layout change. */ private void createRevisionDetailViewers() { disposeExistingViewers(); int layout = store.getInt(PREF_AFFECTED_PATHS_LAYOUT); int swtOrientation = layout == LAYOUT_HORIZONTAL ? 
SWT.HORIZONTAL: SWT.VERTICAL; innerSashForm = new SashForm(mainSashForm, swtOrientation); createText(innerSashForm); changePathsViewer = new ChangePathsTableProvider(innerSashForm, this); createDiffViewer(innerSashForm); setViewerVisibility(); refreshLayout(); } private void disposeExistingViewers() { if (innerSashForm != null) { innerSashForm.dispose(); } if (commentTextViewer != null) { commentTextViewer.getControl().dispose(); } if (changePathsViewer != null) { changePathsViewer.getControl().dispose(); } if (diffTextViewer != null) { diffTextViewer.getControl().dispose(); } } private void createDiffViewer(SashForm parent) { SourceViewer sourceViewer = new SourceViewer(parent, null, null, true, SWT.H_SCROLL | SWT.V_SCROLL | SWT.MULTI | SWT.READ_ONLY); sourceViewer.getTextWidget().setIndent(2); diffTextViewer = sourceViewer; diffTextViewer.setDocument(new Document()); } private void updatePanels(ISelection selection) { if (!(selection instanceof IStructuredSelection)) { clearTextChangePathsAndDiffTextViewers(); return; } Object[] selectedElememts = ((IStructuredSelection) selection).toArray(); if (selectedElememts.length == 1) { MercurialRevision revision = (MercurialRevision) selectedElememts[0]; updatePanelsAfterSelectionOf(revision); } else if (selectedElememts.length == 2) { MercurialRevision youngerRevision = (MercurialRevision) selectedElememts[0]; MercurialRevision olderRevision = (MercurialRevision) selectedElememts[1]; updatePanelsAfterSelectionOf(olderRevision, youngerRevision); } else { clearTextChangePathsAndDiffTextViewers(); } } private void clearTextChangePathsAndDiffTextViewers() { commentTextViewer.setDocument(new Document("")); //$NON-NLS-1$ changePathsViewer.setInput(null); diffTextViewer.setDocument(new Document("")); //$NON-NLS-1$ } private void updatePanelsAfterSelectionOf(MercurialRevision revision) { commentTextViewer.setDocument(new Document(revision.getChangeSet().getComment())); changePathsViewer.setInput(revision); updateDiffPanelFor(revision, null); } private void updatePanelsAfterSelectionOf(MercurialRevision firstRevision, MercurialRevision secondRevision) { // TODO update to combined comment commentTextViewer.setDocument(new Document()); // TODO update to combined file list changePathsViewer.setInput(null); updateDiffPanelFor(firstRevision, secondRevision); } private void updateDiffPanelFor(final MercurialRevision entry, final MercurialRevision secondEntry) { if(!showDiffs) { diffTextViewer.setDocument(new Document()); return; } TeamOperation operation = new TeamOperation(page.getHistoryPageSite().getPart(), null) { public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { monitor.beginTask(getJobName(), 2); try { final String diff = HgPatchClient.getDiff(entry.getChangeSet().getHgRoot() , entry, secondEntry); monitor.worked(1); page.getControl().getDisplay().syncExec(new Runnable() { public void run() { diffTextViewer.setDocument(new Document(diff)); applyLineColoringToDiffViewer(); } }); monitor.worked(1); monitor.done(); } catch (HgException e) { MercurialEclipsePlugin.logError(e); } } @Override protected String getJobName() { // TODO Replace this with from resource return "Update diff viewer"; } @Override protected boolean shouldRun() { return true; } @Override protected boolean canRunAsJob() { return true; } @Override public boolean isUserInitiated() { return false; } }; try { operation.run(); } catch (InvocationTargetException e) { MercurialEclipsePlugin.logError(e); } catch (InterruptedException e) { 
MercurialEclipsePlugin.logError(e); } } private void applyLineColoringToDiffViewer() { IDocument document = diffTextViewer.getDocument(); int nrOfLines = document.getNumberOfLines(); for (int i = 0; i < nrOfLines; i++) { try { IRegion lineInformation = document.getLineInformation(i); int offset = lineInformation.getOffset(); int length = lineInformation.getLength(); Color lineColor = getDiffLineColor(document.get( offset, length)); diffTextViewer.setTextColor(lineColor, offset, length, true); } catch (BadLocationException e) { MercurialEclipsePlugin.logError(e); } } } private Color getDiffLineColor(String line) { Display display = this.diffTextViewer.getControl().getDisplay(); if(line.startsWith("diff ")) { return display.getSystemColor(SWT.COLOR_BLUE); } else if(line.startsWith("+++ ")) { return display.getSystemColor(SWT.COLOR_BLUE); } else if(line.startsWith("--- ")) { return display.getSystemColor(SWT.COLOR_BLUE); } else if(line.startsWith("@@ ")) { return display.getSystemColor(SWT.COLOR_BLUE); } else if(line.startsWith("new file mode")) { return display.getSystemColor(SWT.COLOR_BLUE); } else if(line.startsWith("\\ ")) { return display.getSystemColor(SWT.COLOR_BLUE); } else if(line.startsWith("+")) { return display.getSystemColor(SWT.COLOR_DARK_GREEN); } else if(line.startsWith("-")) { return display.getSystemColor(SWT.COLOR_DARK_RED); } else { return display.getSystemColor(SWT.COLOR_BLACK); } } /** * @return may return null */ MercurialRevision getCurrentRevision() { return (MercurialRevision) changePathsViewer.getInput(); } /** * Create the TextViewer for the logEntry comments */ private void createText(Composite parent) { SourceViewer result = new SourceViewer(parent, null, null, true, SWT.H_SCROLL | SWT.V_SCROLL | SWT.MULTI | SWT.READ_ONLY); result.getTextWidget().setIndent(2); this.commentTextViewer = result; // Create actions for the text editor (copy and select all) final TextViewerAction copyAction = new TextViewerAction( this.commentTextViewer, ITextOperationTarget.COPY); copyAction.setText(Messages.getString("HistoryView.copy")); this.commentTextViewer .addSelectionChangedListener(new ISelectionChangedListener() { public void selectionChanged(SelectionChangedEvent event) { copyAction.update(); } }); final TextViewerAction selectAllAction = new TextViewerAction( this.commentTextViewer, ITextOperationTarget.SELECT_ALL); selectAllAction.setText(Messages.getString("HistoryView.selectAll")); IHistoryPageSite parentSite = getHistoryPageSite(); IPageSite pageSite = parentSite.getWorkbenchPageSite(); IActionBars actionBars = pageSite.getActionBars(); actionBars.setGlobalActionHandler(ITextEditorActionConstants.COPY, copyAction); actionBars.setGlobalActionHandler( ITextEditorActionConstants.SELECT_ALL, selectAllAction); actionBars.updateActionBars(); // Contribute actions to popup menu for the comments area MenuManager menuMgr = new MenuManager(); menuMgr.setRemoveAllWhenShown(true); menuMgr.addMenuListener(new IMenuListener() { public void menuAboutToShow(IMenuManager menuMgr1) { menuMgr1.add(copyAction); menuMgr1.add(selectAllAction); } }); StyledText text = this.commentTextViewer.getTextWidget(); Menu menu = menuMgr.createContextMenu(text); text.setMenu(menu); } private void contributeActions() { Action toggleShowComments = new Action(Messages .getString("HistoryView.showComments"), //$NON-NLS-1$ MercurialEclipsePlugin.getImageDescriptor(IMG_COMMENTS)) { @Override public void run() { showComments = isChecked(); setViewerVisibility(); store.setValue(PREF_SHOW_COMMENTS, showComments); } };
toggleShowComments.setChecked(showComments); Action toggleShowDiffs = new Action(Messages // TODO create new text & image .getString("HistoryView.showDiffs"), //$NON-NLS-1$ MercurialEclipsePlugin.getImageDescriptor(IMG_DIFFS)) { @Override public void run() { showDiffs = isChecked(); setViewerVisibility(); store.setValue(PREF_SHOW_DIFFS, showDiffs); } }; toggleShowDiffs.setChecked(showDiffs); // Toggle wrap comments action Action toggleWrapCommentsAction = new Action(Messages .getString("HistoryView.wrapComments")) { @Override public void run() { wrapCommentsText = isChecked(); setViewerVisibility(); store.setValue(PREF_WRAP_COMMENTS, wrapCommentsText); } }; toggleWrapCommentsAction.setChecked(wrapCommentsText); // Toggle path visible action Action toggleShowAffectedPathsAction = new Action(Messages .getString("HistoryView.showAffectedPaths"), //$NON-NLS-1$ MercurialEclipsePlugin .getImageDescriptor(IMG_AFFECTED_PATHS_FLAT_MODE)) { @Override public void run() { showAffectedPaths = isChecked(); setViewerVisibility(); store.setValue(PREF_SHOW_PATHS, showAffectedPaths); } }; toggleShowAffectedPathsAction.setChecked(showAffectedPaths); IHistoryPageSite parentSite = getHistoryPageSite(); IPageSite pageSite = parentSite.getWorkbenchPageSite(); IActionBars actionBars = pageSite.getActionBars(); // Contribute toggle text visible to the toolbar drop-down IMenuManager actionBarsMenu = actionBars.getMenuManager(); actionBarsMenu.add(toggleWrapCommentsAction); actionBarsMenu.add(new Separator()); actionBarsMenu.add(toggleShowComments); actionBarsMenu.add(toggleShowAffectedPathsAction); actionBarsMenu.add(toggleShowDiffs); actionBarsMenu.add(new Separator()); for (int i = 0; i < toggleAffectedPathsLayoutActions.length; i++) { actionBarsMenu.add(toggleAffectedPathsLayoutActions[i]); } // Create the local tool bar IToolBarManager tbm = actionBars.getToolBarManager(); tbm.add(new Separator()); tbm.add(toggleShowComments); tbm.add(toggleShowAffectedPathsAction); tbm.add(toggleShowDiffs); tbm.update(false); actionBars.updateActionBars(); final BaseSelectionListenerAction openAction = page.getOpenAction(); final BaseSelectionListenerAction openEditorAction = page.getOpenEditorAction(); final BaseSelectionListenerAction compareWithCurrent = page.getCompareWithCurrentAction(); final BaseSelectionListenerAction compareWithPrevious = page.getCompareWithPreviousAction(); final BaseSelectionListenerAction actionRevert = page.getRevertAction(); changePathsViewer.addDoubleClickListener(new IDoubleClickListener() { public void doubleClick(DoubleClickEvent event) { FileStatus fileStatus = (FileStatus) ((IStructuredSelection) event.getSelection()).getFirstElement(); MercurialRevision derived = getDerivedRevision(fileStatus, getCurrentRevision()); if(derived == null){ return; } StructuredSelection selection = new StructuredSelection(new Object[]{derived, fileStatus}); compareWithPrevious.selectionChanged(selection); compareWithPrevious.run(); } }); // Contribute actions to popup menu final MenuManager menuMgr = new MenuManager(); menuMgr.addMenuListener(new IMenuListener() { public void menuAboutToShow(IMenuManager menuMgr1) { IStructuredSelection selection = (IStructuredSelection) changePathsViewer.getSelection(); if(selection.isEmpty()){ return; } FileStatus fileStatus = (FileStatus) selection.getFirstElement(); MercurialRevision base = getCurrentRevision(); MercurialRevision derived = getDerivedRevision(fileStatus, base); if(derived == null){ // XXX currently files outside workspace are not supported... 
return; } selection = new StructuredSelection(derived); openAction.selectionChanged(selection); openEditorAction.selectionChanged(selection); compareWithCurrent.selectionChanged(selection); selection = new StructuredSelection(new Object[]{derived, fileStatus}); compareWithPrevious.selectionChanged(selection); menuMgr1.add(openAction); menuMgr1.add(openEditorAction); menuMgr1.add(new Separator(IWorkbenchActionConstants.GROUP_FILE)); menuMgr1.add(compareWithCurrent); menuMgr1.add(compareWithPrevious); menuMgr1.add(new Separator()); selection = new StructuredSelection(new Object[]{derived}); actionRevert.selectionChanged(selection); menuMgr1.add(actionRevert); } }); menuMgr.setRemoveAllWhenShown(true); changePathsViewer.getTable().setMenu(menuMgr.createContextMenu(changePathsViewer.getTable())); } private void setViewerVisibility() { boolean lowerPartVisible = showAffectedPaths || showComments || showDiffs; mainSashForm.setMaximizedControl(lowerPartVisible ? null : getChangesetsTableControl()); if(!lowerPartVisible) { return; } int[] weights = { showComments ? 1 : 0, showAffectedPaths ? 1 : 0, showDiffs ? 1 : 0 }; innerSashForm.setWeights(weights); commentTextViewer.getTextWidget().setWordWrap(wrapCommentsText); updatePanels(page.getTableViewer().getSelection()); } private Composite getChangesetsTableControl() { return page.getTableViewer().getControl().getParent(); } private void refreshLayout() { innerSashForm.layout(); int[] weights = mainSashForm.getWeights(); if (weights != null && weights.length == 2) { mainSashForm.setWeights(weights); } mainSashForm.layout(); } public static class ToggleAffectedPathsOptionAction extends Action { private final ChangedPathsPage page; private final String preferenceName; private final int value; public ToggleAffectedPathsOptionAction(ChangedPathsPage page, String label, String preferenceName, int value) { super(Messages.getString(label), AS_RADIO_BUTTON); this.page = page; this.preferenceName = preferenceName; this.value = value; IPreferenceStore store = MercurialEclipsePlugin.getDefault() .getPreferenceStore(); setChecked(value == store.getInt(preferenceName)); } @Override public void run() { if (isChecked()) { MercurialEclipsePlugin.getDefault().getPreferenceStore() .setValue(preferenceName, value); page.createRevisionDetailViewers(); } } } public MercurialHistoryPage getHistoryPage() { return page; } public IHistoryPageSite getHistoryPageSite() { return page.getHistoryPageSite(); } public Composite getControl() { return mainSashForm; } public boolean isShowChangePaths() { return showAffectedPaths; } public MercurialHistory getMercurialHistory() { return page.getMercurialHistory(); } /** * @return might return null, if the file is outside Eclipse workspace */ private MercurialRevision getDerivedRevision(FileStatus fileStatus, MercurialRevision base) { IFile file = ResourceUtils.getFileHandle(fileStatus.getAbsolutePath()); if(file == null){ return null; } MercurialRevision derived = new MercurialRevision(base.getChangeSet(), base .getGChangeSet(), file, null, null); return derived; } }
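// applyLineColoringToDiffViewer() above walks the diff document line by line and picks a colour from
// the line prefix (headers and hunk markers blue, additions green, removals red). The sketch below keeps
// the same prefix rules but returns plain colour names so it runs without SWT; DiffColorSketch and the
// sample lines are illustrative, not MercurialEclipse API.
public class DiffColorSketch {

    /** Mirrors the prefix rules of getDiffLineColor, with String names instead of SWT colours. */
    static String colorFor(String line) {
        if (line.startsWith("diff ") || line.startsWith("+++ ") || line.startsWith("--- ")
                || line.startsWith("@@ ") || line.startsWith("new file mode") || line.startsWith("\\ ")) {
            return "BLUE";       // headers and hunk markers
        } else if (line.startsWith("+")) {
            return "DARK_GREEN"; // added line
        } else if (line.startsWith("-")) {
            return "DARK_RED";   // removed line
        } else {
            return "BLACK";      // context line
        }
    }

    public static void main(String[] args) {
        System.out.println(colorFor("@@ -1,4 +1,5 @@")); // BLUE
        System.out.println(colorFor("+added line"));      // DARK_GREEN
        System.out.println(colorFor("-removed line"));    // DARK_RED
        System.out.println(colorFor(" unchanged"));       // BLACK
    }
}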
package org.brandonhaynes.pipegen.utilities;

import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.Modifier;
import javassist.NotFoundException;
import org.brandonhaynes.pipegen.mutation.ClassModifierReplacer;

import java.io.IOException;

public class JvmUtilities {
    public static void main(String[] args) throws IOException {
        if (args.length != 2)
            System.out.println("Usage: JvmUtilities RemoveFinalFlagFromString class-name");
        else
            for (String className : args[1].split(",")) {
                System.out.println(className);
                removeFinalFlag(className);
            }
    }

    public static void removeFinalFlag(String className) throws IOException {
        try {
            ClassModifierReplacer.setModifiers(new ClassPool(true), className, Modifier.PUBLIC);
        } catch (NotFoundException | CannotCompileException e) {
            throw new IOException(e);
        }
    }
}
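// removeFinalFlag(...) above asks ClassModifierReplacer to overwrite the class's modifier flags with
// Modifier.PUBLIC, which drops ACC_FINAL along with everything else. The sketch below shows the
// underlying bit-mask idea using java.lang.reflect.Modifier from the JDK only; ModifierBitsSketch and
// clearFinal are illustrative and do not touch class files.
import java.lang.reflect.Modifier;

public class ModifierBitsSketch {

    /** Clears just the FINAL bit while keeping the other access flags intact. */
    static int clearFinal(int modifiers) {
        return modifiers & ~Modifier.FINAL;
    }

    public static void main(String[] args) {
        int publicFinal = Modifier.PUBLIC | Modifier.FINAL;
        System.out.println(Modifier.toString(publicFinal));             // public final
        System.out.println(Modifier.toString(clearFinal(publicFinal))); // public
        // JvmUtilities is blunter: it replaces the whole flag set with Modifier.PUBLIC.
        System.out.println(Modifier.toString(Modifier.PUBLIC));         // public
    }
}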
package dyvil.tools.compiler.ast.pattern; import dyvil.reflect.Opcodes; import dyvil.tools.asm.Label; import dyvil.tools.compiler.ast.classes.IClass; import dyvil.tools.compiler.ast.context.IContext; import dyvil.tools.compiler.ast.field.IDataMember; import dyvil.tools.compiler.ast.type.IType; import dyvil.tools.compiler.ast.type.builtin.Types; import dyvil.tools.compiler.ast.type.compound.TupleType; import dyvil.tools.compiler.backend.MethodWriter; import dyvil.tools.compiler.backend.exception.BytecodeException; import dyvil.tools.compiler.config.Formatting; import dyvil.tools.compiler.util.Markers; import dyvil.tools.compiler.util.Util; import dyvil.tools.parsing.Name; import dyvil.tools.parsing.marker.Marker; import dyvil.tools.parsing.marker.MarkerList; import dyvil.tools.parsing.position.ICodePosition; public final class TuplePattern extends Pattern implements IPatternList { private IPattern[] patterns = new IPattern[3]; private int patternCount; private IType tupleType; public TuplePattern(ICodePosition position) { this.position = position; } @Override public int getPatternType() { return TUPLE; } @Override public IType getType() { if (this.tupleType != null) { return this.tupleType; } TupleType t = new TupleType(this.patternCount); for (int i = 0; i < this.patternCount; i++) { t.addType(this.patterns[i].getType()); } return this.tupleType = t; } @Override public IPattern withType(IType type, MarkerList markers) { IClass tupleClass = TupleType.getTupleClass(this.patternCount); if (tupleClass == null || !tupleClass.isSubTypeOf(type)) { return null; } this.tupleType = type; for (int i = 0; i < this.patternCount; i++) { IType elementType = type.resolveTypeSafely(tupleClass.getTypeParameter(i)); IPattern pattern = this.patterns[i]; IPattern typedPattern = pattern.withType(elementType, markers); if (typedPattern == null) { Marker m = Markers.semantic(pattern.getPosition(), "pattern.tuple.element.type"); m.addInfo(Markers.getSemantic("pattern.type", pattern.getType())); m.addInfo(Markers.getSemantic("tuple.element.type", elementType)); markers.add(m); } else { this.patterns[i] = typedPattern; } } return this; } @Override public boolean isType(IType type) { return TupleType.isSuperType(type, this.patterns, this.patternCount); } @Override public int patternCount() { return this.patternCount; } @Override public void setPattern(int index, IPattern pattern) { this.patterns[index] = pattern; } @Override public void addPattern(IPattern pattern) { int index = this.patternCount++; if (this.patternCount > this.patterns.length) { IPattern[] temp = new IPattern[this.patternCount]; System.arraycopy(this.patterns, 0, temp, 0, index); this.patterns = temp; } this.patterns[index] = pattern; } @Override public IPattern getPattern(int index) { return this.patterns[index]; } @Override public IDataMember resolveField(Name name) { for (int i = 0; i < this.patternCount; i++) { IDataMember f = this.patterns[i].resolveField(name); if (f != null) { return f; } } return null; } @Override public IPattern resolve(MarkerList markers, IContext context) { if (this.patternCount == 1) { return this.patterns[0].resolve(markers, context); } for (int i = 0; i < this.patternCount; i++) { this.patterns[i] = this.patterns[i].resolve(markers, context); } return this; } @Override public void writeInvJump(MethodWriter writer, int varIndex, IType matchedType, Label elseLabel) throws BytecodeException { varIndex = IPattern.ensureVar(writer, varIndex, matchedType); final int lineNumber = this.getLineNumber(); final IClass tupleClass 
= this.tupleType.getTheClass(); final String internalTupleClassName = this.tupleType.getInternalName(); for (int i = 0; i < this.patternCount; i++) { if (this.patterns[i].getPatternType() == WILDCARD) { // Skip wildcard patterns continue; } writer.writeVarInsn(Opcodes.ALOAD, varIndex); matchedType.writeCast(writer, this.tupleType, lineNumber); writer.writeFieldInsn(Opcodes.GETFIELD, internalTupleClassName, "_" + (i + 1), "Ljava/lang/Object;"); final IType targetType = this.tupleType.resolveTypeSafely(tupleClass.getTypeParameter(i)); Types.OBJECT.writeCast(writer, targetType, lineNumber); this.patterns[i].writeInvJump(writer, -1, targetType, elseLabel); } } @Override public void toString(String prefix, StringBuilder buffer) { if (this.patternCount == 0) { if (Formatting.getBoolean("tuple.empty.space_between")) { buffer.append("( )"); } else { buffer.append("()"); } return; } buffer.append('('); if (Formatting.getBoolean("tuple.open_paren.space_after")) { buffer.append(' '); } Util.astToString(prefix, this.patterns, this.patternCount, Formatting.getSeparator("tuple.separator", ','), buffer); if (Formatting.getBoolean("tuple.close_paren.space_before")) { buffer.append(' '); } buffer.append(')'); } }
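// writeInvJump(...) above matches a tuple by visiting each element field (_1, _2, ...), skipping
// WILDCARD sub-patterns and delegating the rest to the element pattern's own jump code. The sketch
// below expresses the same element-wise strategy at runtime over an Object[]; a null entry in
// 'patterns' plays the role of a wildcard. TupleMatchSketch is illustrative, not the Dyvil compiler's
// bytecode emitter.
import java.util.Objects;
import java.util.function.Predicate;

public class TupleMatchSketch {

    /** Matches element-by-element; null sub-patterns are wildcards and are skipped. */
    @SafeVarargs
    static boolean matches(Object[] tuple, Predicate<Object>... patterns) {
        if (tuple.length != patterns.length) {
            return false;
        }
        for (int i = 0; i < patterns.length; i++) {
            if (patterns[i] != null && !patterns[i].test(tuple[i])) {
                return false; // corresponds to the jump to the else-label in the bytecode version
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Object[] tuple = { 1, "abc" };
        System.out.println(matches(tuple, x -> Objects.equals(x, 1), null));      // true (wildcard)
        System.out.println(matches(tuple, x -> Objects.equals(x, 2), s -> true)); // false
    }
}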
package org.dita.dost.module.reader; import org.apache.xerces.xni.grammars.XMLGrammarPool; import org.dita.dost.exception.DITAOTException; import org.dita.dost.exception.DITAOTXMLErrorHandler; import org.dita.dost.log.MessageUtils; import org.dita.dost.module.AbstractPipelineModuleImpl; import org.dita.dost.module.GenMapAndTopicListModule.TempFileNameScheme; import org.dita.dost.pipeline.AbstractPipelineInput; import org.dita.dost.reader.*; import org.dita.dost.util.*; import org.dita.dost.writer.DitaWriterFilter; import org.dita.dost.writer.ExportAnchorsFilter; import org.dita.dost.writer.ExportAnchorsFilter.ExportAnchor; import org.dita.dost.writer.TopicFragmentFilter; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.*; import org.xml.sax.ext.LexicalHandler; import org.xml.sax.helpers.DefaultHandler; import javax.xml.parsers.DocumentBuilder; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import javax.xml.transform.Result; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamResult; import java.io.*; import java.net.URI; import java.util.*; import java.util.function.Predicate; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static org.dita.dost.reader.GenListModuleReader.*; import static org.dita.dost.util.Configuration.*; import static org.dita.dost.util.Constants.*; import static org.dita.dost.util.FileUtils.getRelativeUnixPath; import static org.dita.dost.util.FileUtils.resolve; import static org.dita.dost.util.FilterUtils.SUBJECT_SCHEME_EXTENSION; import static org.dita.dost.util.Job.*; import static org.dita.dost.util.URLUtils.*; import static org.dita.dost.util.XMLUtils.close; /** * Base class for document reader and serializer. * * @since 2.4 */ public abstract class AbstractReaderModule extends AbstractPipelineModuleImpl { public static final String ELEMENT_STUB = "stub"; Predicate<String> formatFilter; /** FileInfos keyed by src. 
*/ private final Map<URI, FileInfo> fileinfos = new HashMap<>(); /** Set of all topic files */ private final Set<URI> fullTopicSet = new HashSet<>(128); /** Set of all map files */ private final Set<URI> fullMapSet = new HashSet<>(128); /** Set of topic files containing href */ private final Set<URI> hrefTopicSet = new HashSet<>(128); /** Set of dita files containing conref */ private final Set<URI> conrefSet = new HashSet<>(128); /** Set of topic files containing coderef */ private final Set<URI> coderefSet = new HashSet<>(128); /** Set of all images */ private final Set<Reference> formatSet = new HashSet<>(); /** Set of all images used for flagging */ private final Set<URI> flagImageSet = new LinkedHashSet<>(128); /** Set of all HTML and other non-DITA or non-image files */ private final Set<URI> htmlSet = new HashSet<>(128); /** Set of all the href targets */ private final Set<URI> hrefTargetSet = new HashSet<>(128); /** Set of all the conref targets */ private Set<URI> conrefTargetSet = new HashSet<>(128); /** Set of all the non-conref targets */ private final Set<URI> nonConrefCopytoTargetSet = new HashSet<>(128); /** Set of subsidiary files */ private final Set<URI> coderefTargetSet = new HashSet<>(16); /** Set of absolute flag image files */ private final Set<URI> relFlagImagesSet = new LinkedHashSet<>(128); /** List of files waiting for parsing. Values are absolute URI references. */ private final Queue<Reference> waitList = new LinkedList<>(); /** List of parsed files */ final List<URI> doneList = new LinkedList<>(); final List<URI> failureList = new LinkedList<>(); /** Set of outer dita files */ private final Set<URI> outDitaFilesSet = new HashSet<>(128); /** Set of sources of conacion */ private final Set<URI> conrefpushSet = new HashSet<>(128); /** Set of files containing keyref */ private final Set<URI> keyrefSet = new HashSet<>(128); /** Set of files with "@processing-role=resource-only" */ private final Set<URI> resourceOnlySet = new HashSet<>(128); /** Absolute basedir for processing */ private URI baseInputDir; // /** Number of directory levels base directory is adjusted. */ // private int uplevels = 0; GenListModuleReader listFilter; KeydefFilter keydefFilter; ExportAnchorsFilter exportAnchorsFilter; boolean validate = true; ContentHandler nullHandler; private TempFileNameScheme tempFileNameScheme; /** Absolute path to input file. */ URI rootFile; /** Subject scheme key map. Key is key value, value is key definition. */ private Map<String, KeyDef> schemekeydefMap; /** Subject scheme absolute file paths. */ private final Set<URI> schemeSet = new HashSet<>(128); /** Subject scheme usage. Key is absolute file path, value is set of applicable subject schemes. */ private final Map<URI, Set<URI>> schemeDictionary = new HashMap<>(); private final Map<URI, URI> copyTo = new HashMap<>(); private boolean setSystemid = true; Mode processingMode; /** Generate {@code xtrf} and {@code xtrc} attributes */ boolean genDebugInfo; /** use grammar pool cache */ private boolean gramcache = true; private boolean setSystemId; /** Profiling is enabled. */ private boolean profilingEnabled; String transtype; /** Absolute DITA-OT base path. */ File ditaDir; private File ditavalFile; FilterUtils filterUtils; /** Absolute path to current destination file. */ File outputFile; Map<String, Map<String, Set<String>>> validateMap; Map<String, Map<String, String>> defaultValueMap; /** XMLReader instance for parsing dita file */ private XMLReader reader; /** Absolute path to current source file. 
*/ URI currentFile; private Map<URI, Set<URI>> dic; private SubjectSchemeReader subjectSchemeReader; private FilterUtils baseFilterUtils; DitaWriterFilter ditaWriterFilter; TopicFragmentFilter topicFragmentFilter; public AbstractReaderModule() { } public abstract void readStartFile() throws DITAOTException; /** * Initialize reusable filters. */ void initFilters() { listFilter = new GenListModuleReader(); listFilter.setLogger(logger); listFilter.setPrimaryDitamap(rootFile); listFilter.setJob(job); if (profilingEnabled) { filterUtils = parseFilterFile(); } exportAnchorsFilter = new ExportAnchorsFilter(); exportAnchorsFilter.setInputFile(rootFile); keydefFilter = new KeydefFilter(); keydefFilter.setLogger(logger); keydefFilter.setCurrentFile(rootFile); keydefFilter.setJob(job); nullHandler = new DefaultHandler(); ditaWriterFilter = new DitaWriterFilter(); ditaWriterFilter.setLogger(logger); ditaWriterFilter.setJob(job); ditaWriterFilter.setEntityResolver(reader.getEntityResolver()); topicFragmentFilter = new TopicFragmentFilter(ATTRIBUTE_NAME_CONREF, ATTRIBUTE_NAME_CONREFEND); tempFileNameScheme.setBaseDir(job.getInputDir()); } /** * Init xml reader used for pipeline parsing. * * @param ditaDir absolute path to DITA-OT directory * @param validate whether validate input file * @throws SAXException parsing exception */ void initXMLReader(final File ditaDir, final boolean validate) throws SAXException { reader = XMLUtils.getXMLReader(); reader.setFeature(FEATURE_NAMESPACE, true); reader.setFeature(FEATURE_NAMESPACE_PREFIX, true); if (validate) { reader.setFeature(FEATURE_VALIDATION, true); try { reader.setFeature(FEATURE_VALIDATION_SCHEMA, true); } catch (final SAXNotRecognizedException e) { // Not Xerces, ignore exception } } else { final String msg = MessageUtils.getInstance().getMessage("DOTJ037W").toString(); logger.warn(msg); } if (gramcache) { final XMLGrammarPool grammarPool = GrammarPoolManager.getGrammarPool(); try { reader.setProperty("http://apache.org/xml/properties/internal/grammar-pool", grammarPool); logger.info("Using Xerces grammar pool for DTD and schema caching."); } catch (final NoClassDefFoundError e) { logger.debug("Xerces not available, not using grammar caching"); } catch (final SAXNotRecognizedException | SAXNotSupportedException e) { logger.warn("Failed to set Xerces grammar pool for parser: " + e.getMessage()); } } CatalogUtils.setDitaDir(ditaDir); reader.setEntityResolver(CatalogUtils.getCatalogResolver()); } void parseInputParameters(final AbstractPipelineInput input) { ditaDir = toFile(input.getAttribute(ANT_INVOKER_EXT_PARAM_DITADIR)); if (!ditaDir.isAbsolute()) { throw new IllegalArgumentException("DITA-OT installation directory " + ditaDir + " must be absolute"); } validate = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_VALIDATE)); transtype = input.getAttribute(ANT_INVOKER_EXT_PARAM_TRANSTYPE); gramcache = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAM_GRAMCACHE)); setSystemid = "yes".equalsIgnoreCase(input.getAttribute(ANT_INVOKER_EXT_PARAN_SETSYSTEMID)); processingMode = Optional.ofNullable(input.getAttribute(ANT_INVOKER_EXT_PARAM_PROCESSING_MODE)) .map(String::toUpperCase) .map(Mode::valueOf) .orElse(Mode.LAX); genDebugInfo = Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_GENERATE_DEBUG_ATTR)); // For the output control job.setGeneratecopyouter(input.getAttribute(ANT_INVOKER_EXT_PARAM_GENERATECOPYOUTTER)); job.setOutterControl(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTTERCONTROL)); 
job.setOnlyTopicInMap(Boolean.valueOf(input.getAttribute(ANT_INVOKER_EXT_PARAM_ONLYTOPICINMAP))); // Set the OutputDir final File path = toFile(input.getAttribute(ANT_INVOKER_EXT_PARAM_OUTPUTDIR)); if (path.isAbsolute()) { job.setOutputDir(path); } else { throw new IllegalArgumentException("Output directory " + path + " must be absolute"); } final File basedir = toFile(input.getAttribute(ANT_INVOKER_PARAM_BASEDIR)); final URI ditaInputDir = toURI(input.getAttribute(ANT_INVOKER_EXT_PARAM_INPUTDIR)); if (ditaInputDir != null) { if (ditaInputDir.isAbsolute()) { baseInputDir = ditaInputDir; } else if (ditaInputDir.getPath() != null && ditaInputDir.getPath().startsWith(URI_SEPARATOR)) { baseInputDir = setScheme(ditaInputDir, "file"); } else { // XXX Shouldn't this be resolved to current directory, not Ant script base directory? baseInputDir = basedir.toURI().resolve(ditaInputDir); } assert baseInputDir.isAbsolute(); } URI ditaInput = toURI(input.getAttribute(ANT_INVOKER_PARAM_INPUTMAP)); ditaInput = ditaInput != null ? ditaInput : job.getInputFile(); if (ditaInput.isAbsolute()) { rootFile = ditaInput; } else if (ditaInput.getPath() != null && ditaInput.getPath().startsWith(URI_SEPARATOR)) { rootFile = setScheme(ditaInput, "file"); } else if (baseInputDir != null) { rootFile = baseInputDir.resolve(ditaInput); } else { rootFile = basedir.toURI().resolve(ditaInput); } assert rootFile.isAbsolute(); job.setInputFile(rootFile); if (baseInputDir == null) { baseInputDir = rootFile.resolve("."); } assert baseInputDir.isAbsolute(); profilingEnabled = Optional.ofNullable(input.getAttribute(ANT_INVOKER_PARAM_PROFILING_ENABLED)) .map(Boolean::parseBoolean) .orElse(true); if (profilingEnabled) { ditavalFile = toFile(input.getAttribute(ANT_INVOKER_PARAM_DITAVAL)); if (ditavalFile != null && !ditavalFile.isAbsolute()) { // XXX Shouldn't this be resolved to current directory, not Ant script base directory? ditavalFile = new File(basedir, ditavalFile.getPath()).getAbsoluteFile(); } } // create the keydef file for scheme files schemekeydefMap = new HashMap<>(); } void processWaitList() throws DITAOTException { while (!waitList.isEmpty()) { processFile(waitList.remove(), null); } } /** * Get pipe line filters * * @param fileToParse absolute path to current file being processed */ abstract List<XMLFilter> getProcessingPipe(final URI fileToParse); /** * Read a file and process it for list information. * * @param ref system path of the file to process * @param parseFile file to parse, may be {@code null} * @throws DITAOTException if processing failed */ void processFile(final Reference ref, final URI parseFile) throws DITAOTException { currentFile = ref.filename; assert currentFile.isAbsolute(); final URI src = parseFile != null ? 
parseFile : currentFile; assert src.isAbsolute(); final URI rel = baseInputDir.relativize(currentFile); // FIXME: doesn't work with uplevels without hashed temporary files outputFile = new File(job.tempDirURI.resolve(rel)); validateMap = Collections.EMPTY_MAP; // defaultValueMap = Collections.EMPTY_MAP; logger.info("Processing " + currentFile); final String[] params = { currentFile.toString() }; try { XMLReader xmlSource = getXmlReader(ref.format); for (final XMLFilter f: getProcessingPipe(currentFile)) { f.setParent(xmlSource); f.setEntityResolver(CatalogUtils.getCatalogResolver()); xmlSource = f; } xmlSource.setContentHandler(nullHandler); xmlSource.parse(src.toString()); if (listFilter.isValidInput()) { processParseResult(currentFile); categorizeCurrentFile(ref); } else if (!currentFile.equals(rootFile)) { logger.warn(MessageUtils.getInstance().getMessage("DOTJ021W", params).toString()); failureList.add(currentFile); } } catch (final RuntimeException e) { throw e; } catch (final SAXParseException sax) { final Exception inner = sax.getException(); if (inner != null && inner instanceof DITAOTException) { throw (DITAOTException) inner; } if (currentFile.equals(rootFile)) { throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTJ012F", params).toString() + ": " + sax.getMessage(), sax); } else if (processingMode == Mode.STRICT) { throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTJ013E", params).toString() + ": " + sax.getMessage(), sax); } else { logger.error(MessageUtils.getInstance().getMessage("DOTJ013E", params).toString() + ": " + sax.getMessage(), sax); } failureList.add(currentFile); } catch (final FileNotFoundException e) { if (currentFile.equals(rootFile)) { throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTA069F", params).toString(), e); } else if (processingMode == Mode.STRICT) { throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTX008E", params).toString() + ": " + e.getMessage(), e); } else { logger.error(MessageUtils.getInstance().getMessage("DOTX008E", params).toString()); } failureList.add(currentFile); } catch (final Exception e) { if (currentFile.equals(rootFile)) { throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTJ012F", params).toString() + ": " + e.getMessage(), e); } else if (processingMode == Mode.STRICT) { throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTJ013E", params).toString() + ": " + e.getMessage(), e); } else { logger.error(MessageUtils.getInstance().getMessage("DOTJ013E", params).toString() + ": " + e.getMessage(), e); } failureList.add(currentFile); } if (!listFilter.isValidInput() && currentFile.equals(rootFile)) { if (validate) { // stop the build if all content in the input file was filtered out. throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTJ022F", params).toString()); } else { // stop the build if the content of the file is not valid. 
throw new DITAOTException(MessageUtils.getInstance().getMessage("DOTJ034F", params).toString()); } } doneList.add(currentFile); listFilter.reset(); keydefFilter.reset(); } /** * Process results from parsing a single topic * * @param currentFile absolute URI processes files */ void processParseResult(final URI currentFile) { // Category non-copyto result and update uplevels accordingly for (final Reference file: listFilter.getNonCopytoResult()) { categorizeReferenceFile(file); // updateUplevels(file.filename); } for (final Map.Entry<URI, URI> e : listFilter.getCopytoMap().entrySet()) { final URI source = e.getValue(); final URI target = e.getKey(); copyTo.put(target, source); // updateUplevels(target); } schemeSet.addAll(listFilter.getSchemeRefSet()); // collect key definitions for (final Map.Entry<String, KeyDef> e: keydefFilter.getKeysDMap().entrySet()) { // key and value.keys will differ when keydef is a redirect to another keydef final String key = e.getKey(); final KeyDef value = e.getValue(); if (schemeSet.contains(currentFile)) { schemekeydefMap.put(key, new KeyDef(key, value.href, value.scope, value.format, currentFile, null)); } } hrefTargetSet.addAll(listFilter.getHrefTargets()); conrefTargetSet.addAll(listFilter.getConrefTargets()); nonConrefCopytoTargetSet.addAll(listFilter.getNonConrefCopytoTargets()); coderefTargetSet.addAll(listFilter.getCoderefTargets()); outDitaFilesSet.addAll(listFilter.getOutFilesSet()); // Generate topic-scheme dictionary final Set<URI> schemeSet = listFilter.getSchemeSet(); if (schemeSet != null && !schemeSet.isEmpty()) { Set<URI> children = schemeDictionary.get(currentFile); if (children == null) { children = new HashSet<>(); } children.addAll(schemeSet); schemeDictionary.put(currentFile, children); final Set<URI> hrfSet = listFilter.getHrefTargets(); for (final URI filename: hrfSet) { children = schemeDictionary.get(filename); if (children == null) { children = new HashSet<>(); } children.addAll(schemeSet); schemeDictionary.put(filename, children); } } } /** * Categorize current file type * * @param ref file path */ void categorizeCurrentFile(final Reference ref) { final URI currentFile = ref.filename; if (listFilter.hasConaction()) { conrefpushSet.add(currentFile); } if (listFilter.hasConRef()) { conrefSet.add(currentFile); } if (listFilter.hasKeyRef()) { keyrefSet.add(currentFile); } if (listFilter.hasCodeRef()) { coderefSet.add(currentFile); } if (listFilter.isDitaTopic()) { if (ref.format != null && !ref.format.equals(ATTR_FORMAT_VALUE_DITA)) { assert currentFile.getFragment() == null; final URI f = currentFile.normalize(); if (!fileinfos.containsKey(f)) { final FileInfo i = new FileInfo.Builder() //.uri(tempFileNameScheme.generateTempFileName(currentFile)) .src(currentFile) .format(ref.format) .build(); fileinfos.put(i.src, i); } } fullTopicSet.add(currentFile); hrefTargetSet.add(currentFile); if (listFilter.hasHref()) { hrefTopicSet.add(currentFile); } } else if (listFilter.isDitaMap()) { fullMapSet.add(currentFile); } } /** * Categorize file. 
 * @param file file system path with optional format
 */
private void categorizeReferenceFile(final Reference file) {
    // avoid files referred by coderef being added into wait list
    if (listFilter.getCoderefTargets().contains(file.filename)) {
        return;
    }
    if (formatFilter.test(file.format)) {
        if (isFormatDita(file.format) || ATTR_FORMAT_VALUE_DITAMAP.equals(file.format)) {
            addToWaitList(file);
        } else if (ATTR_FORMAT_VALUE_IMAGE.equals(file.format)) {
            formatSet.add(file);
            if (!exists(file.filename)) {
                logger.warn(MessageUtils.getInstance().getMessage("DOTX008W", file.filename.toString()).toString());
            }
        } else if (ATTR_FORMAT_VALUE_DITAVAL.equals(file.format)) {
            formatSet.add(file);
        } else {
            htmlSet.add(file.filename);
        }
    }
}

// /**
//  * Update uplevels if needed. If the parameter contains a {@link Constants#STICK STICK}, it and
//  * anything following it is removed.
//  *
//  * @param file file path
//  */
// private void updateUplevels(final URI file) {
//     assert file.isAbsolute();
//     if (file.getPath() != null) {
//         final URI f = file.toString().contains(STICK)
//                 ? toURI(file.toString().substring(0, file.toString().indexOf(STICK)))
//                 : file;
//         final URI relative = getRelativePath(rootFile, f).normalize();
//         final int lastIndex = relative.getPath().lastIndexOf(".." + URI_SEPARATOR);
//         if (lastIndex != -1) {
//             final int newUplevels = lastIndex / 3 + 1;
//             uplevels = Math.max(newUplevels, uplevels);

/**
 * Add the given file to the wait list if it has not been parsed.
 *
 * @param ref reference to absolute system path
 */
void addToWaitList(final Reference ref) {
    final URI file = ref.filename;
    assert file.isAbsolute() && file.getFragment() == null;
    if (doneList.contains(file) || waitList.contains(ref) || file.equals(currentFile)) {
        return;
    }
    waitList.add(ref);
}

// /**
//  * Update base directory and prefix based on uplevels.
//  */
// void updateBaseDirectory() {
//     for (int i = uplevels; i > 0; i--) {
//         baseInputDir = baseInputDir.resolve("..");

/**
 * Get up-levels relative path.
 *
 * @param rootTemp relative URI for temporary root file
 * @return path to up-level, e.g. {@code ../../}, may be empty string
 */
private String getLevelsPath(final URI rootTemp) {
    final int u = rootTemp.toString().split(URI_SEPARATOR).length - 1;
    if (u == 0) {
        return "";
    }
    final StringBuilder buff = new StringBuilder();
    // append one ".." segment per directory level
    for (int current = u; current > 0; current--) {
        buff.append("..").append(File.separator);
    }
    return buff.toString();
}

/**
 * Parse filter file
 *
 * @return configured filter utility
 */
private FilterUtils parseFilterFile() {
    Map<FilterUtils.FilterKey, FilterUtils.Action> filterMap;
    if (ditavalFile != null) {
        final DitaValReader ditaValReader = new DitaValReader();
        ditaValReader.setLogger(logger);
        ditaValReader.initXMLReader(setSystemid);
        ditaValReader.read(ditavalFile.getAbsoluteFile());
        // Store filter map for later use
        filterMap = ditaValReader.getFilterMap();
        // Store flagging image used for image copying
        flagImageSet.addAll(ditaValReader.getImageList());
        relFlagImagesSet.addAll(ditaValReader.getRelFlagImageList());
    } else {
        filterMap = emptyMap();
    }
    final FilterUtils filterUtils = new FilterUtils(printTranstype.contains(transtype), filterMap);
    filterUtils.setLogger(logger);
    return filterUtils;
}

/**
 * Handle topics which are only conref sources and exclude them from normal processing.
*/ void handleConref() { // Get pure conref targets final Set<URI> pureConrefTargets = new HashSet<>(128); for (final URI target: conrefTargetSet) { if (!nonConrefCopytoTargetSet.contains(target)) { pureConrefTargets.add(target); } } conrefTargetSet = pureConrefTargets; // Remove pure conref targets from fullTopicSet fullTopicSet.removeAll(pureConrefTargets); } /** * Write result files. * * @throws DITAOTException if writing result files failed */ void outputResult() throws DITAOTException { tempFileNameScheme.setBaseDir(baseInputDir); // assume empty Job final URI rootTemp = tempFileNameScheme.generateTempFileName(rootFile); final File relativeRootFile = toFile(rootTemp); if (baseInputDir.getScheme().equals("file")) { job.setProperty(INPUT_DIR, new File(baseInputDir).getAbsolutePath()); } job.setProperty(INPUT_DIR_URI, baseInputDir.toString()); job.setProperty(INPUT_DITAMAP, relativeRootFile.toString()); job.setProperty(INPUT_DITAMAP_URI, rootTemp.toString()); job.setProperty(INPUT_DITAMAP_LIST_FILE_LIST, USER_INPUT_FILE_LIST_FILE); final File inputfile = new File(job.tempDir, USER_INPUT_FILE_LIST_FILE); writeListFile(inputfile, relativeRootFile.toString()); job.setProperty("tempdirToinputmapdir.relative.value", StringUtils.escapeRegExp(getPrefix(relativeRootFile))); job.setProperty("uplevels", getLevelsPath(rootTemp)); resourceOnlySet.addAll(listFilter.getResourceOnlySet()); for (final URI file: outDitaFilesSet) { getOrCreateFileInfo(fileinfos, file).isOutDita = true; } for (final URI file: fullTopicSet) { final FileInfo ff = getOrCreateFileInfo(fileinfos, file); if (ff.format == null) { ff.format = ATTR_FORMAT_VALUE_DITA; } } for (final URI file: fullMapSet) { final FileInfo ff = getOrCreateFileInfo(fileinfos, file); if (ff.format == null) { ff.format = ATTR_FORMAT_VALUE_DITAMAP; } } for (final URI file: hrefTopicSet) { getOrCreateFileInfo(fileinfos, file).hasLink = true; } for (final URI file: conrefSet) { getOrCreateFileInfo(fileinfos, file).hasConref = true; } for (final Reference file: formatSet) { getOrCreateFileInfo(fileinfos, file.filename).format = file.format; } for (final URI file: flagImageSet) { final FileInfo f = getOrCreateFileInfo(fileinfos, file); f.isFlagImage = true; f.format = ATTR_FORMAT_VALUE_IMAGE; } for (final URI file: htmlSet) { getOrCreateFileInfo(fileinfos, file).format = ATTR_FORMAT_VALUE_HTML; } for (final URI file: hrefTargetSet) { getOrCreateFileInfo(fileinfos, file).isTarget = true; } for (final URI file: schemeSet) { getOrCreateFileInfo(fileinfos, file).isSubjectScheme = true; } for (final URI file: coderefTargetSet) { final FileInfo f = getOrCreateFileInfo(fileinfos, file); f.isSubtarget = true; if (f.format == null) { f.format = PR_D_CODEREF.localName; } } for (final URI file: conrefpushSet) { getOrCreateFileInfo(fileinfos, file).isConrefPush = true; } for (final URI file: keyrefSet) { getOrCreateFileInfo(fileinfos, file).hasKeyref = true; } for (final URI file: coderefSet) { getOrCreateFileInfo(fileinfos, file).hasCoderef = true; } for (final URI file: resourceOnlySet) { getOrCreateFileInfo(fileinfos, file).isResourceOnly = true; } addFlagImagesSetToProperties(job, relFlagImagesSet); final Map<URI, URI> filteredCopyTo = filterConflictingCopyTo(copyTo, fileinfos.values()); for (final FileInfo fs: fileinfos.values()) { if (!failureList.contains(fs.src)) { // if (job.getFileInfo(fs.uri) != null) { // logger.info("Already in job:" + fs.uri); // if (formatFilter.test(fs.format)) { final URI src = filteredCopyTo.get(fs.src); // correct copy-to if (src != 
null) { final FileInfo corr = new FileInfo.Builder(fs).src(src).build(); job.add(corr); } else { job.add(fs); } // } else { // logger.info("skip " + fs.src + " -> " + fs.uri); } } for (final URI target : filteredCopyTo.keySet()) { final URI tmp = tempFileNameScheme.generateTempFileName(target); final FileInfo fi = new FileInfo.Builder().result(target).uri(tmp).build(); if (formatFilter.test(fi.format)) { job.add(fi); } } try { logger.info("Serializing job specification"); if (!job.tempDir.exists() && !job.tempDir.mkdirs()) { throw new DITAOTException("Failed to create " + job.tempDir + " directory"); } job.write(); } catch (final IOException e) { throw new DITAOTException("Failed to serialize job configuration files: " + e.getMessage(), e); } try { SubjectSchemeReader.writeMapToXML(addMapFilePrefix(listFilter.getRelationshipGrap()), new File(job.tempDir, FILE_NAME_SUBJECT_RELATION)); SubjectSchemeReader.writeMapToXML(addMapFilePrefix(schemeDictionary), new File(job.tempDir, FILE_NAME_SUBJECT_DICTIONARY)); } catch (final IOException e) { throw new DITAOTException("Failed to serialize subject scheme files: " + e.getMessage(), e); } writeExportAnchors(); KeyDef.writeKeydef(new File(job.tempDir, SUBJECT_SCHEME_KEYDEF_LIST_FILE), addFilePrefix(schemekeydefMap.values())); } /** Filter copy-to where target is used directly. */ private Map<URI, URI> filterConflictingCopyTo( final Map<URI, URI> copyTo, final Collection<FileInfo> fileInfos) { final Set<URI> fileinfoTargets = fileInfos.stream() .filter(fi -> fi.src.equals(fi.result)) .map(fi -> fi.result) .collect(Collectors.toSet()); return copyTo.entrySet().stream() .filter(e -> !fileinfoTargets.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } /** * Write list file. * @param inputfile output list file * @param relativeRootFile list value */ private void writeListFile(final File inputfile, final String relativeRootFile) { Writer bufferedWriter = null; try { bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(inputfile))); bufferedWriter.write(relativeRootFile); bufferedWriter.flush(); } catch (final IOException e) { logger.error(e.getMessage(), e) ; } finally { if (bufferedWriter != null) { try { bufferedWriter.close(); } catch (final IOException e) { logger.error(e.getMessage(), e) ; } } } } /** * Prefix path. 
* * @param relativeRootFile relative path for root temporary file * @return either an empty string or a path which ends in {@link File#separator File.separator} * */ private String getPrefix(final File relativeRootFile) { String res; final File p = relativeRootFile.getParentFile(); if (p != null) { res = p.toString() + File.separator; } else { res = ""; } return res; } private FileInfo getOrCreateFileInfo(final Map<URI, FileInfo> fileInfos, final URI file) { assert file.getFragment() == null; final URI f = file.normalize(); FileInfo.Builder b; if (fileInfos.containsKey(f)) { b = new FileInfo.Builder(fileInfos.get(f)); } else { b = new FileInfo.Builder().src(file); } b = b.uri(tempFileNameScheme.generateTempFileName(file)); final FileInfo i = b.build(); fileInfos.put(i.src, i); return i; } private void writeExportAnchors() throws DITAOTException { if (INDEX_TYPE_ECLIPSEHELP.equals(transtype)) { // Output plugin id final File pluginIdFile = new File(job.tempDir, FILE_NAME_PLUGIN_XML); final DelayConrefUtils delayConrefUtils = new DelayConrefUtils(); delayConrefUtils.writeMapToXML(exportAnchorsFilter.getPluginMap(), pluginIdFile); OutputStream exportStream = null; XMLStreamWriter export = null; try { exportStream = new FileOutputStream(new File(job.tempDir, FILE_NAME_EXPORT_XML)); export = XMLOutputFactory.newInstance().createXMLStreamWriter(exportStream, "UTF-8"); export.writeStartDocument(); export.writeStartElement("stub"); for (final ExportAnchor e: exportAnchorsFilter.getExportAnchors()) { export.writeStartElement("file"); export.writeAttribute("name", tempFileNameScheme.generateTempFileName(toFile(e.file).toURI()).toString()); for (final String t: sort(e.topicids)) { export.writeStartElement("topicid"); export.writeAttribute("name", t); export.writeEndElement(); } for (final String i: sort(e.ids)) { export.writeStartElement("id"); export.writeAttribute("name", i); export.writeEndElement(); } for (final String k: sort(e.keys)) { export.writeStartElement("keyref"); export.writeAttribute("name", k); export.writeEndElement(); } export.writeEndElement(); } export.writeEndElement(); export.writeEndDocument(); } catch (final FileNotFoundException e) { throw new DITAOTException("Failed to write export anchor file: " + e.getMessage(), e); } catch (final XMLStreamException e) { throw new DITAOTException("Failed to serialize export anchor file: " + e.getMessage(), e); } finally { if (export != null) { try { export.close(); } catch (final XMLStreamException e) { logger.error("Failed to close export anchor file: " + e.getMessage(), e); } } if (exportStream != null) { try { exportStream.close(); } catch (final IOException e) { logger.error("Failed to close export anchor file: " + e.getMessage(), e); } } } } } private List<String> sort(final Set<String> set) { final List<String> sorted = new ArrayList<>(set); Collections.sort(sorted); return sorted; } /** * Convert absolute paths to relative temporary directory paths * @return map with relative keys and values */ private Map<URI, Set<URI>> addMapFilePrefix(final Map<URI, Set<URI>> map) { final Map<URI, Set<URI>> res = new HashMap<>(); for (final Map.Entry<URI, Set<URI>> e: map.entrySet()) { final URI key = e.getKey(); final Set<URI> newSet = new HashSet<>(e.getValue().size()); for (final URI file: e.getValue()) { newSet.add(tempFileNameScheme.generateTempFileName(file)); } res.put(key.equals(ROOT_URI) ? key : tempFileNameScheme.generateTempFileName(key), newSet); } return res; } /** * Add file prefix. For absolute paths the prefix is not added. 
* * @param set file paths * @return file paths with prefix */ private Map<URI, URI> addFilePrefix(final Map<URI, URI> set) { final Map<URI, URI> newSet = new HashMap<>(); for (final Map.Entry<URI, URI> file: set.entrySet()) { final URI key = tempFileNameScheme.generateTempFileName(file.getKey()); final URI value = tempFileNameScheme.generateTempFileName(file.getValue()); newSet.put(key, value); } return newSet; } private Collection<KeyDef> addFilePrefix(final Collection<KeyDef> keydefs) { final Collection<KeyDef> res = new ArrayList<>(keydefs.size()); for (final KeyDef k: keydefs) { final URI source = tempFileNameScheme.generateTempFileName(k.source); res.add(new KeyDef(k.keys, k.href, k.scope, k.format, source, null)); } return res; } /** * add FlagImangesSet to Properties, which needn't to change the dir level, * just ouput to the ouput dir. * * @param prop job configuration * @param set absolute flag image files */ private void addFlagImagesSetToProperties(final Job prop, final Set<URI> set) { final Set<URI> newSet = new LinkedHashSet<>(128); for (final URI file: set) { // assert file.isAbsolute(); if (file.isAbsolute()) { // no need to append relative path before absolute paths newSet.add(file.normalize()); } else { // In ant, all the file separator should be slash, so we need to // replace all the back slash with slash. newSet.add(file.normalize()); } } // write list attribute to file final String fileKey = Constants.REL_FLAGIMAGE_LIST.substring(0, Constants.REL_FLAGIMAGE_LIST.lastIndexOf("list")) + "file"; prop.setProperty(fileKey, Constants.REL_FLAGIMAGE_LIST.substring(0, Constants.REL_FLAGIMAGE_LIST.lastIndexOf("list")) + ".list"); final File list = new File(job.tempDir, prop.getProperty(fileKey)); Writer bufferedWriter = null; try { bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(list))); final Iterator<URI> it = newSet.iterator(); while (it.hasNext()) { bufferedWriter.write(it.next().getPath()); if (it.hasNext()) { bufferedWriter.write("\n"); } } bufferedWriter.flush(); bufferedWriter.close(); } catch (final IOException e) { logger.error(e.getMessage(), e) ; } finally { if (bufferedWriter != null) { try { bufferedWriter.close(); } catch (final IOException e) { logger.error(e.getMessage(), e) ; } } } prop.setProperty(Constants.REL_FLAGIMAGE_LIST, StringUtils.join(newSet, COMMA)); } void processFile(final FileInfo f) { currentFile = f.src; if (f.src == null || !exists(f.src) || !f.src.equals(f.result)) { logger.warn("Ignoring a copy-to file " + f.result); return; } outputFile = new File(job.tempDir, f.file.getPath()); final File outputDir = outputFile.getParentFile(); if (!outputDir.exists() && !outputDir.mkdirs()) { logger.error("Failed to create output directory " + outputDir.getAbsolutePath()); return; } logger.info("Processing " + f.src + " to " + outputFile.toURI()); final Set<URI> schemaSet = dic.get(f.uri); if (schemaSet != null && !schemaSet.isEmpty()) { logger.debug("Loading subject schemes"); subjectSchemeReader.reset(); for (final URI schema : schemaSet) { subjectSchemeReader.loadSubjectScheme(new File(job.tempDirURI.resolve(schema.getPath() + SUBJECT_SCHEME_EXTENSION))); } validateMap = subjectSchemeReader.getValidValuesMap(); defaultValueMap = subjectSchemeReader.getDefaultValueMap(); } else { validateMap = emptyMap(); defaultValueMap = emptyMap(); } if (profilingEnabled) { filterUtils = baseFilterUtils.refine(subjectSchemeReader.getSubjectSchemeMap()); } InputSource in = null; Result out = null; try { reader.setErrorHandler(new 
DITAOTXMLErrorHandler(currentFile.toString(), logger)); final TransformerFactory tf = TransformerFactory.newInstance(); final SAXTransformerFactory stf = (SAXTransformerFactory) tf; final TransformerHandler serializer = stf.newTransformerHandler(); XMLReader parser = getXmlReader(f.format); XMLReader xmlSource = parser; for (final XMLFilter filter: getProcessingPipe(currentFile)) { filter.setParent(xmlSource); xmlSource = filter; } // ContentHandler must be reset so e.g. Saxon 9.1 will reassign ContentHandler // when reusing filter with multiple Transformers. xmlSource.setContentHandler(null); try { final LexicalHandler lexicalHandler = new DTDForwardHandler(xmlSource); parser.setProperty("http://xml.org/sax/properties/lexical-handler", lexicalHandler); parser.setFeature("http://xml.org/sax/features/lexical-handler", true); } catch (final SAXNotRecognizedException e) {} in = new InputSource(f.src.toString()); out = new StreamResult(new FileOutputStream(outputFile)); serializer.setResult(out); xmlSource.setContentHandler(serializer); xmlSource.parse(new InputSource(f.src.toString())); } catch (final RuntimeException e) { throw e; } catch (final Exception e) { logger.error(e.getMessage(), e) ; } finally { try { close(out); } catch (final Exception e) { logger.error(e.getMessage(), e) ; } try { close(in); } catch (final IOException e) { logger.error(e.getMessage(), e) ; } } if (isFormatDita(f.format)) { f.format = ATTR_FORMAT_VALUE_DITA; } } private XMLReader getXmlReader(final String format) throws SAXException { for (final Map.Entry<String, String> e: parserMap.entrySet()) { if (format != null && format.equals(e.getKey())) { try { return (XMLReader) this.getClass().forName(e.getValue()).newInstance(); } catch (final InstantiationException | ClassNotFoundException | IllegalAccessException ex) { throw new SAXException(ex); } } } return reader; } void init() throws IOException, DITAOTException, SAXException { try { final String cls = Optional .ofNullable(job.getProperty("temp-file-name-scheme")) .orElse(configuration.get("temp-file-name-scheme")); tempFileNameScheme = (TempFileNameScheme) getClass().forName(cls).newInstance(); } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) { throw new RuntimeException(e); } tempFileNameScheme.setBaseDir(job.getInputDir()); // Output subject schemas outputSubjectScheme(); subjectSchemeReader = new SubjectSchemeReader(); subjectSchemeReader.setLogger(logger); subjectSchemeReader.setJob(job); dic = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_DICTIONARY)); if (profilingEnabled) { final DitaValReader filterReader = new DitaValReader(); filterReader.setLogger(logger); filterReader.setJob(job); filterReader.initXMLReader(setSystemId); Map<FilterUtils.FilterKey, FilterUtils.Action> filterMap; if (ditavalFile != null) { filterReader.read(ditavalFile.getAbsoluteFile()); filterMap = filterReader.getFilterMap(); } else { filterMap = Collections.EMPTY_MAP; } baseFilterUtils = new FilterUtils(printTranstype.contains(transtype), filterMap); baseFilterUtils.setLogger(logger); } initXMLReader(ditaDir, validate); initFilters(); } /** * Output subject schema file. 
* * @throws DITAOTException if generation files */ private void outputSubjectScheme() throws DITAOTException { try { final Map<URI, Set<URI>> graph = SubjectSchemeReader.readMapFromXML(new File(job.tempDir, FILE_NAME_SUBJECT_RELATION)); final Queue<URI> queue = new LinkedList<>(graph.keySet()); final Set<URI> visitedSet = new HashSet<>(); final DocumentBuilder builder = XMLUtils.getDocumentBuilder(); builder.setEntityResolver(CatalogUtils.getCatalogResolver()); while (!queue.isEmpty()) { final URI parent = queue.poll(); final Set<URI> children = graph.get(parent); if (children != null) { queue.addAll(children); } if (ROOT_URI.equals(parent) || visitedSet.contains(parent)) { continue; } visitedSet.add(parent); final File tmprel = new File(FileUtils.resolve(job.tempDir, parent) + SUBJECT_SCHEME_EXTENSION); final Document parentRoot; if (!tmprel.exists()) { final URI src = job.getFileInfo(parent).src; parentRoot = builder.parse(src.toString()); } else { parentRoot = builder.parse(tmprel); } if (children != null) { for (final URI childpath: children) { final Document childRoot = builder.parse(rootFile.resolve(childpath.getPath()).toString()); mergeScheme(parentRoot, childRoot); generateScheme(new File(job.tempDir, childpath.getPath() + SUBJECT_SCHEME_EXTENSION), childRoot); } } //Output parent scheme generateScheme(new File(job.tempDir, parent.getPath() + SUBJECT_SCHEME_EXTENSION), parentRoot); } } catch (final RuntimeException e) { throw e; } catch (final Exception e) { logger.error(e.getMessage(), e) ; throw new DITAOTException(e); } } private void mergeScheme(final Document parentRoot, final Document childRoot) { final Queue<Element> pQueue = new LinkedList<>(); pQueue.offer(parentRoot.getDocumentElement()); while (!pQueue.isEmpty()) { final Element pe = pQueue.poll(); NodeList pList = pe.getChildNodes(); for (int i = 0; i < pList.getLength(); i++) { final Node node = pList.item(i); if (node.getNodeType() == Node.ELEMENT_NODE) { pQueue.offer((Element)node); } } String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS); if (StringUtils.isEmptyString(value) || !SUBJECTSCHEME_SUBJECTDEF.matches(value)) { continue; } if (!StringUtils.isEmptyString( value = pe.getAttribute(ATTRIBUTE_NAME_KEYREF))) { // extend child scheme final Element target = searchForKey(childRoot.getDocumentElement(), value); if (target == null) { /* * TODO: we have a keyref here to extend into child scheme, but can't * find any matching <subjectdef> in child scheme. Shall we throw out * a warning? * * Not for now, just bypass it. 
*/ continue; } // target found pList = pe.getChildNodes(); for (int i = 0; i < pList.getLength(); i++) { final Node tmpnode = childRoot.importNode(pList.item(i), false); if (tmpnode.getNodeType() == Node.ELEMENT_NODE && searchForKey(target, ((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) { continue; } target.appendChild(tmpnode); } } else if (!StringUtils.isEmptyString( value = pe.getAttribute(ATTRIBUTE_NAME_KEYS))) { // merge into parent scheme final Element target = searchForKey(childRoot.getDocumentElement(), value); if (target != null) { pList = target.getChildNodes(); for (int i = 0; i < pList.getLength(); i++) { final Node tmpnode = parentRoot.importNode(pList.item(i), false); if (tmpnode.getNodeType() == Node.ELEMENT_NODE && searchForKey(pe, ((Element)tmpnode).getAttribute(ATTRIBUTE_NAME_KEYS)) != null) { continue; } pe.appendChild(tmpnode); } } } } } private Element searchForKey(final Element root, final String key) { if (root == null || StringUtils.isEmptyString(key)) { return null; } final Queue<Element> queue = new LinkedList<>(); queue.offer(root); while (!queue.isEmpty()) { final Element pe = queue.poll(); final NodeList pchildrenList = pe.getChildNodes(); for (int i = 0; i < pchildrenList.getLength(); i++) { final Node node = pchildrenList.item(i); if (node.getNodeType() == Node.ELEMENT_NODE) { queue.offer((Element)node); } } String value = pe.getAttribute(ATTRIBUTE_NAME_CLASS); if (StringUtils.isEmptyString(value) || !SUBJECTSCHEME_SUBJECTDEF.matches(value)) { continue; } value = pe.getAttribute(ATTRIBUTE_NAME_KEYS); if (StringUtils.isEmptyString(value)) { continue; } if (value.equals(key)) { return pe; } } return null; } /** * Serialize subject scheme file. * * @param filename output filepath * @param root subject scheme document * * @throws DITAOTException if generation fails */ private void generateScheme(final File filename, final Document root) throws DITAOTException { final File p = filename.getParentFile(); if (!p.exists() && !p.mkdirs()) { throw new DITAOTException("Failed to make directory " + p.getAbsolutePath()); } Result res = null; try { res = new StreamResult(new FileOutputStream(filename)); final DOMSource ds = new DOMSource(root); final TransformerFactory tff = TransformerFactory.newInstance(); final Transformer tf = tff.newTransformer(); tf.transform(ds, res); } catch (final RuntimeException e) { throw e; } catch (final Exception e) { logger.error(e.getMessage(), e) ; throw new DITAOTException(e); } finally { try { close(res); } catch (IOException e) { throw new DITAOTException(e); } } } /** * Get path to base directory * * @param filename relative input file path from base directory * @param traceFilename absolute input file * @param inputMap absolute path to start file * @return path to base directory, {@code null} if not available */ public static File getPathtoProject(final File filename, final File traceFilename, final File inputMap, final Job job) { if (job.getGeneratecopyouter() != Job.Generate.OLDSOLUTION) { if (isOutFile(traceFilename, inputMap)) { return toFile(getRelativePathFromOut(traceFilename.getAbsoluteFile(), job)); } else { return new File(getRelativeUnixPath(traceFilename.getAbsolutePath(), inputMap.getAbsolutePath())).getParentFile(); } } else { return FileUtils.getRelativePath(filename); } } /** * Just for the overflowing files. 
* @param overflowingFile overflowingFile * @return relative system path to out which ends in {@link java.io.File#separator File.separator} */ private static String getRelativePathFromOut(final File overflowingFile, final Job job) { final URI relativePath = getRelativePath(job.getInputFile(), overflowingFile.toURI()); final File outputDir = job.getOutputDir().getAbsoluteFile(); final File outputPathName = new File(outputDir, "index.html"); final File finalOutFilePathName = resolve(outputDir, relativePath.getPath()); final File finalRelativePathName = FileUtils.getRelativePath(finalOutFilePathName, outputPathName); File parentDir = finalRelativePathName.getParentFile(); if (parentDir == null || parentDir.getPath().isEmpty()) { parentDir = new File("."); } return parentDir.getPath() + File.separator; } /** * Check if path falls outside start document directory * * @param filePathName absolute path to test * @param inputMap absolute input map path * @return {@code true} if outside start directory, otherwise {@code false} */ private static boolean isOutFile(final File filePathName, final File inputMap){ final File relativePath = FileUtils.getRelativePath(inputMap.getAbsoluteFile(), filePathName.getAbsoluteFile()); return !(relativePath.getPath().length() == 0 || !relativePath.getPath().startsWith("..")); } /** * Lexical handler to forward DTD declaration into processing instructions. */ private final class DTDForwardHandler implements LexicalHandler { private final XMLReader parser; public DTDForwardHandler(XMLReader parser) { this.parser = parser; } @Override public void startDTD(final String name, final String publicId, final String systemId) throws SAXException { if (publicId != null && !publicId.isEmpty()) { parser.getContentHandler().processingInstruction("doctype-public", publicId); } if (systemId != null && !systemId.isEmpty()) { parser.getContentHandler().processingInstruction("doctype-system", systemId); } } @Override public void endDTD() throws SAXException {} @Override public void startEntity(String name) throws SAXException {} @Override public void endEntity(String name) throws SAXException {} @Override public void startCDATA() throws SAXException {} @Override public void endCDATA() throws SAXException {} @Override public void comment(char[] ch, int start, int length) throws SAXException {} } }
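/*
 * Illustrative sketch (hypothetical, not one of the classes above): a minimal concrete
 * subclass that wires the AbstractReaderModule lifecycle methods together in the order
 * they depend on each other. It assumes the DITA-OT pipeline API in which
 * AbstractPipelineModuleImpl leaves execute(AbstractPipelineInput) to subclasses, and
 * that GenListModuleReader.Reference offers a Reference(URI) constructor; the class
 * name ExampleReaderModule is made up for the example.
 */
package org.dita.dost.module.reader;

import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.List;

import org.dita.dost.exception.DITAOTException;
import org.dita.dost.pipeline.AbstractPipelineInput;
import org.dita.dost.pipeline.AbstractPipelineOutput;
import org.dita.dost.reader.GenListModuleReader.Reference;
import org.xml.sax.SAXException;
import org.xml.sax.XMLFilter;

public final class ExampleReaderModule extends AbstractReaderModule {

    @Override
    public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws DITAOTException {
        try {
            parseInputParameters(input); // read Ant parameters into fields (rootFile, baseInputDir, ...)
            formatFilter = format -> true; // accept every format in this sketch
            init();                      // set up readers, filters and subject schemes
            readStartFile();             // seed the wait list with the start file
            processWaitList();           // parse every queued file
            handleConref();              // drop pure conref sources from the topic set
            outputResult();              // serialize job configuration and list files
        } catch (final IOException | SAXException e) {
            throw new DITAOTException(e.getMessage(), e);
        }
        return null;
    }

    @Override
    public void readStartFile() throws DITAOTException {
        // rootFile is populated by parseInputParameters(); the Reference(URI) constructor is assumed
        addToWaitList(new Reference(rootFile));
    }

    @Override
    List<XMLFilter> getProcessingPipe(final URI fileToParse) {
        // no additional SAX filters in this sketch
        return Collections.emptyList();
    }
}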
package eggdropsoap.spreadinglilypads; //import java.lang.reflect.*; import net.minecraft.block.Block; import net.minecraft.block.BlockLilyPad; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.ItemStack; import net.minecraftforge.common.Configuration; import cpw.mods.fml.common.Mod; import cpw.mods.fml.common.Mod.EventHandler; // used in 1.6.2 //import cpw.mods.fml.common.Mod.PreInit; // used in 1.5.2 //import cpw.mods.fml.common.Mod.Init; // used in 1.5.2 //import cpw.mods.fml.common.Mod.PostInit; // used in 1.5.2 import cpw.mods.fml.common.Mod.Instance; import cpw.mods.fml.common.SidedProxy; import cpw.mods.fml.common.event.FMLInitializationEvent; import cpw.mods.fml.common.event.FMLPostInitializationEvent; import cpw.mods.fml.common.event.FMLPreInitializationEvent; import cpw.mods.fml.common.network.NetworkMod; import cpw.mods.fml.common.registry.LanguageRegistry; @Mod(modid="SpreadingLilypads", name="Spreading Lilypads", version="0.1.0") @NetworkMod(clientSideRequired=true, serverSideRequired=false) public class SpreadingLilypads { public Block spreadingLilyPad; public int spreadingLilyPadID; // The instance of your mod that Forge uses. @Instance("SpreadingLilypads") public static SpreadingLilypads instance; // Says where the client and server 'proxy' code is loaded. @SidedProxy(clientSide="eggdropsoap.spreadinglilypads.client.ClientProxy", serverSide="eggdropsoap.spreadinglilypads.CommonProxy") public static CommonProxy proxy; @EventHandler // used in 1.6.2 //@PreInit // used in 1.5.2 public void preInit(FMLPreInitializationEvent event) { // initialise spreading block Block.blocksList[Block.waterlily.blockID] = null; spreadingLilyPad = (new BlockSpreadingLilyPad(Block.waterlily.blockID)) .setHardness(0.0F).setStepSound(Block.soundGrassFootstep) .setUnlocalizedName("spreadinglily") .func_111022_d("waterlily") .setCreativeTab(CreativeTabs.tabDecorations); } @EventHandler // used in 1.6.2 //@Init // used in 1.5.2 public void load(FMLInitializationEvent event) { proxy.registerRenderers(); LanguageRegistry.addName(spreadingLilyPad, "Spreading Lily Pad"); // replace worldgen lilypads with spreading lilies, // but keeping original blockID for save compatibility Block.blocksList[Block.waterlily.blockID] = spreadingLilyPad; } @EventHandler // used in 1.6.2 //@PostInit // used in 1.5.2 public void postInit(FMLPostInitializationEvent event) { // Stub Method } }
package org.jabref.logic.importer.fileformat; import java.io.BufferedReader; import java.io.IOException; import java.time.Year; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; import java.util.Arrays; import org.jabref.logic.importer.Importer; import org.jabref.logic.importer.ParserResult; import org.jabref.logic.util.OS; import org.jabref.logic.util.StandardFileType; import org.jabref.model.entry.AuthorList; import org.jabref.model.entry.BibEntry; import org.jabref.model.entry.BibtexEntryTypes; import org.jabref.model.entry.FieldName; import org.jabref.model.entry.Month; public class RisImporter extends Importer { private static final Pattern RECOGNIZED_FORMAT_PATTERN = Pattern.compile("TY - .*"); private static DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy"); //stores all the date tags from highest to lowest priority private final List<String> dateTags = Arrays.asList("Y1", "PY", "DA", "Y2"); @Override public String getName() { return "RIS"; } @Override public StandardFileType getFileType() { return StandardFileType.RIS; } @Override public String getDescription() { return "Imports a Biblioscape Tag File."; } @Override public boolean isRecognizedFormat(BufferedReader reader) throws IOException { // Our strategy is to look for the "TY - *" line. return reader.lines().anyMatch(line -> RECOGNIZED_FORMAT_PATTERN.matcher(line).find()); } @Override public ParserResult importDatabase(BufferedReader reader) throws IOException { List<BibEntry> bibitems = new ArrayList<>(); //use optional here, so that no exception will be thrown if the file is empty String linesAsString = reader.lines().reduce((line, nextline) -> line + "\n" + nextline).orElse(""); String[] entries = linesAsString.replace("\u2013", "-").replace("\u2014", "--").replace("\u2015", "--") .split("ER -.*\\n"); for (String entry1 : entries) { String dateTag = ""; String dateValue = ""; int datePriority = dateTags.size(); String type = ""; String author = ""; String editor = ""; String startPage = ""; String endPage = ""; String comment = ""; Optional<Month> month = Optional.empty(); Map<String, String> fields = new HashMap<>(); String[] lines = entry1.split("\n"); for (int j = 0; j < lines.length; j++) { StringBuilder current = new StringBuilder(lines[j]); boolean done = false; while (!done && (j < (lines.length - 1))) { if ((lines[j + 1].length() >= 6) && !" 
- ".equals(lines[j + 1].substring(2, 6))) { if ((current.length() > 0) && !Character.isWhitespace(current.charAt(current.length() - 1)) && !Character.isWhitespace(lines[j + 1].charAt(0))) { current.append(' '); } current.append(lines[j + 1]); j++; } else { done = true; } } String entry = current.toString(); if (entry.length() < 6) { continue; } else { String tag = entry.substring(0, 2); String value = entry.substring(6).trim(); if ("TY".equals(tag)) { if ("BOOK".equals(value)) { type = "book"; } else if ("JOUR".equals(value) || "MGZN".equals(value)) { type = "article"; } else if ("THES".equals(value)) { type = "phdthesis"; } else if ("UNPB".equals(value)) { type = "unpublished"; } else if ("RPRT".equals(value)) { type = "techreport"; } else if ("CONF".equals(value)) { type = "inproceedings"; } else if ("CHAP".equals(value)) { type = "incollection";//"inbook"; } else if ("PAT".equals(value)) { type = "patent"; } else { type = "other"; } } else if ("T1".equals(tag) || "TI".equals(tag)) { String oldVal = fields.get(FieldName.TITLE); if (oldVal == null) { fields.put(FieldName.TITLE, value); } else { if (oldVal.endsWith(":") || oldVal.endsWith(".") || oldVal.endsWith("?")) { fields.put(FieldName.TITLE, oldVal + " " + value); } else { fields.put(FieldName.TITLE, oldVal + ": " + value); } } fields.put(FieldName.TITLE, fields.get(FieldName.TITLE).replaceAll("\\s+", " ")); // Normalize whitespaces } else if ("BT".equals(tag)) { fields.put(FieldName.BOOKTITLE, value); } else if (("T2".equals(tag) || "J2".equals(tag) || "JA".equals(tag)) && ((fields.get(FieldName.JOURNAL) == null) || "".equals(fields.get(FieldName.JOURNAL)))) { //if there is no journal title, then put second title as journal title fields.put(FieldName.JOURNAL, value); } else if ("JO".equals(tag) || "J1".equals(tag) || "JF".equals(tag)) { //if this field appears then this should be the journal title fields.put(FieldName.JOURNAL, value); } else if ("T3".equals(tag)) { fields.put(FieldName.SERIES, value); } else if ("AU".equals(tag) || "A1".equals(tag) || "A2".equals(tag) || "A3".equals(tag) || "A4".equals(tag)) { if ("".equals(author)) { author = value; } else { author += " and " + value; } } else if ("ED".equals(tag)) { if (editor.isEmpty()) { editor = value; } else { editor += " and " + value; } } else if ("JA".equals(tag) || "JF".equals(tag)) { if ("inproceedings".equals(type)) { fields.put(FieldName.BOOKTITLE, value); } else { fields.put(FieldName.JOURNAL, value); } } else if ("LA".equals(tag)) { fields.put(FieldName.LANGUAGE, value); } else if ("CA".equals(tag)) { fields.put("caption", value); } else if ("DB".equals(tag)) { fields.put("database", value); } else if ("IS".equals(tag) || "AN".equals(tag) || "C7".equals(tag) || "M1".equals(tag)) { fields.put(FieldName.NUMBER, value); } else if ("SP".equals(tag)) { startPage = value; } else if ("PB".equals(tag)) { if ("phdthesis".equals(type)) { fields.put(FieldName.SCHOOL, value); } else { fields.put(FieldName.PUBLISHER, value); } } else if ("AD".equals(tag) || "CY".equals(tag) || "PP".equals(tag)) { fields.put(FieldName.ADDRESS, value); } else if ("EP".equals(tag)) { endPage = value; if (!endPage.isEmpty()) { endPage = "--" + endPage; } } else if ("ET".equals(tag)) { fields.put(FieldName.EDITION, value); } else if ("SN".equals(tag)) { fields.put(FieldName.ISSN, value); } else if ("VL".equals(tag)) { fields.put(FieldName.VOLUME, value); } else if ("N2".equals(tag) || "AB".equals(tag)) { String oldAb = fields.get(FieldName.ABSTRACT); if (oldAb == null) { fields.put(FieldName.ABSTRACT, 
value); } else { fields.put(FieldName.ABSTRACT, oldAb + OS.NEWLINE + value); } } else if ("UR".equals(tag) || "L2".equals(tag) || "LK".equals(tag)) { fields.put(FieldName.URL, value); } else if (isDateTag(tag) && value.length() >= 4) { int tagPriority = getDatePriority(tag); if (tagPriority < datePriority) { String year = value.substring(0, 4); try { Year.parse(year, formatter); //if the year is parsebale we have found a higher priority date dateTag = tag; dateValue = value; datePriority = tagPriority; } catch (DateTimeParseException ex) { //We can't parse the year, we ignore it } } } else if ("KW".equals(tag)) { if (fields.containsKey(FieldName.KEYWORDS)) { String kw = fields.get(FieldName.KEYWORDS); fields.put(FieldName.KEYWORDS, kw + ", " + value); } else { fields.put(FieldName.KEYWORDS, value); } } else if ("U1".equals(tag) || "U2".equals(tag) || "N1".equals(tag)) { if (!comment.isEmpty()) { comment = comment + OS.NEWLINE; } comment = comment + value; } else if ("M3".equals(tag) || "DO".equals(tag)) { addDoi(fields, value); } else if ("C3".equals(tag)) { fields.put(FieldName.EVENTTITLE, value); } else if ("N1".equals(tag) || "RN".equals(tag)) { fields.put(FieldName.NOTE, value); } else if ("ST".equals(tag)) { fields.put(FieldName.SHORTTITLE, value); } else if ("C2".equals(tag)) { fields.put(FieldName.EPRINT, value); fields.put(FieldName.EPRINTTYPE, "pubmed"); } else if ("TA".equals(tag)) { fields.put(FieldName.TRANSLATOR, value); } // fields for which there is no direct mapping in the bibtext standard else if ("AV".equals(tag)) { fields.put("archive_location", value); } else if ("CN".equals(tag) || "VO".equals(tag)) { fields.put("call-number", value); } else if ("DB".equals(tag)) { fields.put("archive", value); } else if ("NV".equals(tag)) { fields.put("number-of-volumes", value); } else if ("OP".equals(tag)) { fields.put("original-title", value); } else if ("RI".equals(tag)) { fields.put("reviewed-title", value); } else if ("RP".equals(tag)) { fields.put("status", value); } else if ("SE".equals(tag)) { fields.put("section", value); } else if ("ID".equals(tag)) { fields.put("refid", value); } } // fix authors if (!author.isEmpty()) { author = AuthorList.fixAuthorLastNameFirst(author); fields.put(FieldName.AUTHOR, author); } if (!editor.isEmpty()) { editor = AuthorList.fixAuthorLastNameFirst(editor); fields.put(FieldName.EDITOR, editor); } if (!comment.isEmpty()) { fields.put(FieldName.COMMENT, comment); } fields.put(FieldName.PAGES, startPage + endPage); } // if we found a date if (dateTag.length() > 0) { fields.put(FieldName.YEAR, dateValue.substring(0, 4)); String[] parts = dateValue.split("/"); if ((parts.length > 1) && !parts[1].isEmpty()) { try { int monthNumber = Integer.parseInt(parts[1]); month = Month.getMonthByNumber(monthNumber); } catch (NumberFormatException ex) { // The month part is unparseable, so we ignore it. 
} } } // Remove empty fields: fields.entrySet().removeIf(key -> (key.getValue() == null) || key.getValue().trim().isEmpty()); // create one here // type is set in the loop above BibEntry entry = new BibEntry(BibtexEntryTypes.getTypeOrDefault(type)); entry.setField(fields); // month has a special treatment as we use the separate method "setMonth" of BibEntry instead of directly setting the value month.ifPresent(entry::setMonth); bibitems.add(entry); } return new ParserResult(bibitems); } private void addDoi(Map<String, String> hm, String val) { String doi = val.toLowerCase(Locale.ENGLISH); if (doi.startsWith("doi:")) { doi = doi.replaceAll("(?i)doi:", "").trim(); hm.put(FieldName.DOI, doi); } } private boolean isDateTag(String searchTag) { return dateTags.stream().anyMatch(tag -> tag.equals(searchTag)); } private int getDatePriority(String dateTag) { return dateTags.indexOf(dateTag); } }
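/*
 * Illustrative usage sketch (hypothetical, not part of the importer above): feeding a
 * small in-memory RIS record through RisImporter using only the methods it defines,
 * isRecognizedFormat(BufferedReader) and importDatabase(BufferedReader). The class name
 * RisImporterExample and the sample record are made up for the example.
 */
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

import org.jabref.logic.importer.ParserResult;
import org.jabref.logic.importer.fileformat.RisImporter;

public final class RisImporterExample {

    public static void main(String[] args) throws IOException {
        final String ris =
                "TY  - JOUR\n" +
                "AU  - Doe, Jane\n" +
                "TI  - An example title\n" +
                "JO  - Example Journal\n" +
                "PY  - 2001\n" +
                "ER  - \n";

        final RisImporter importer = new RisImporter();

        // isRecognizedFormat probes the stream for a "TY - *" line
        try (BufferedReader probe = new BufferedReader(new StringReader(ris))) {
            System.out.println("recognized: " + importer.isRecognizedFormat(probe));
        }

        // importDatabase maps RIS tags (TY, AU, TI, JO, PY, ...) onto BibEntry fields
        // and returns them wrapped in a ParserResult
        try (BufferedReader reader = new BufferedReader(new StringReader(ris))) {
            final ParserResult result = importer.importDatabase(reader);
            System.out.println("parsed: " + result);
        }
    }
}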
package org.jenkins.ci.plugins.jobimport; import hudson.Extension; import hudson.model.RootAction; import hudson.model.Hudson; import hudson.util.FormValidation; import java.io.IOException; import java.net.MalformedURLException; import java.util.Arrays; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.logging.Logger; import javax.servlet.ServletException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPathExpressionException; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.xml.sax.SAXException; /** * @author <a href="mailto:jieryn@gmail.com">Jesse Farinacci</a> * @since 1.0 */ @Extension public final class JobImportAction implements RootAction { private static final Logger LOG = Logger .getLogger(JobImportAction.class .getName()); private String remoteUrl; private final SortedSet<RemoteJob> remoteJobs = new TreeSet<RemoteJob>(); private final SortedMap<RemoteJob, RemoteJobImportStatus> remoteJobsImportStatus = new TreeMap<RemoteJob, RemoteJobImportStatus>(); public void doClear(final StaplerRequest request, final StaplerResponse response) throws ServletException, IOException { remoteUrl = null; remoteJobs.clear(); remoteJobsImportStatus.clear(); response.sendRedirect(Hudson.getInstance().getRootUrl()); } public void doImport(final StaplerRequest request, final StaplerResponse response) throws ServletException, IOException { remoteJobsImportStatus.clear(); if (isRemoteJobsAvailable()) { if (request.hasParameter("jobUrl")) { for (final String jobUrl : Arrays.asList(request.getParameterValues("jobUrl"))) { final RemoteJob remoteJob = getRemoteJobs(jobUrl); if (remoteJob != null) { if (!remoteJobsImportStatus.containsKey(remoteJob)) { remoteJobsImportStatus.put(remoteJob, new RemoteJobImportStatus(remoteJob)); } remoteJobsImportStatus.get(remoteJob).setStatus("SUCCESS!"); } } } } response.forwardToPreviousPage(request); } public void doQuery(final StaplerRequest request, final StaplerResponse response) throws ServletException, IOException { remoteJobs.clear(); remoteJobsImportStatus.clear(); remoteUrl = request.getParameter("remoteUrl"); try { if (StringUtils.isNotEmpty(remoteUrl)) { remoteJobs.addAll(RemoteJobUtils.fromXml(URLUtils.fetchUrl(remoteUrl + "/api/xml"))); } } catch (final XPathExpressionException e) { // fall through } catch (final MalformedURLException e) { // fall through } catch (final SAXException e) { // fall through } catch (final IOException e) { // fall through } catch (final ParserConfigurationException e) { // fall through } response.forwardToPreviousPage(request); } public FormValidation doTestConnection(@QueryParameter("remoteUrl") final String remoteUrl) { return FormValidation.ok(); } public String getDisplayName() { return Messages.DisplayName(); } public String getIconFileName() { return "/images/32x32/setting.png"; } public SortedSet<RemoteJob> getRemoteJobs() { return remoteJobs; } private RemoteJob getRemoteJobs(final String jobUrl) { if (StringUtils.isNotEmpty(jobUrl)) { for (final RemoteJob remoteJob : remoteJobs) { if (jobUrl.equals(remoteJob.getUrl())) { return remoteJob; } } } return null; } public SortedMap<RemoteJob, RemoteJobImportStatus> getRemoteJobsImportStatus() { return remoteJobsImportStatus; } public String getRemoteUrl() { return remoteUrl; } public String getUrlName() { return "/job-import"; } public 
boolean isRemoteJobsAvailable() { return remoteJobs.size() > 0; } public boolean isRemoteJobsImportStatusAvailable() { return remoteJobsImportStatus.size() > 0; } public void setRemoteUrl(final String remoteUrl) { this.remoteUrl = remoteUrl; } }
package org.jenkins.plugins.appaloosa; import com.appaloosastore.client.AppaloosaClient; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Action; import hudson.model.BuildListener; import hudson.model.Hudson; import hudson.model.Node; import hudson.model.Result; import hudson.plugins.promoted_builds.Promotion; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.BuildStepMonitor; import hudson.tasks.Publisher; import hudson.tasks.Recorder; import hudson.util.FormValidation; import hudson.util.RunList; import net.sf.json.JSONObject; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.Predicate; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.AncestorInPath; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; public class AppaloosaPublisher extends Recorder { public final String token; public final String filePattern; public final String proxyHost; public final String proxyUser; public final String proxyPass; public final int proxyPort; @DataBoundConstructor public AppaloosaPublisher(String token, String filePattern, String proxyHost, String proxyUser, String proxyPass, int proxyPort) { this.token = token; this.filePattern = filePattern; this.proxyHost = proxyHost; this.proxyUser = proxyUser; this.proxyPass = proxyPass; this.proxyPort = proxyPort; } @Override public DescriptorImpl getDescriptor() { return (DescriptorImpl) super.getDescriptor(); } public BuildStepMonitor getRequiredMonitorService() { return BuildStepMonitor.NONE; } @Override public boolean perform(AbstractBuild build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { if (build.getResult().isWorseOrEqualTo(Result.FAILURE)) return true; // nothing to do // Validates that the organization token is filled in the project configuration. if (StringUtils.isBlank(token)) { listener.error(Messages._AppaloosaPublisher_noToken().toString()); return false; } // Validates that the file pattern is filled in the project configuration. if (StringUtils.isBlank(filePattern)) { listener.error(Messages._AppaloosaPublisher_noFilePattern().toString()); return false; } //search file in the workspace with the pattern FileFinder fileFinder = new FileFinder(filePattern); // Where we'll get artifacts from FilePath rootDir; // If the promotion plugin is used we have to take care to get data from the original build (not the promotion build) if (Hudson.getInstance().getPlugin("promoted-builds") != null && build instanceof Promotion) { rootDir = new FilePath (((Promotion) build).getTarget().getArtifactsDir()); } else { rootDir = build.getWorkspace(); if (rootDir==null) { // slave down? 
listener.error(Messages.AppaloosaPublisher_buildWorkspaceUnavailable()); return false; } } listener.getLogger().println(Messages.AppaloosaPublisher_RootDirectory(rootDir)); List<String> fileNames = rootDir.act(fileFinder); listener.getLogger().println(Messages.AppaloosaPublisher_foundFiles(fileNames)); if (fileNames.isEmpty()) { listener.error(Messages._AppaloosaPublisher_noArtifactsFound(filePattern).toString()); return false; } // Initialize Appaloosa Client AppaloosaClient appaloosaClient = new AppaloosaClient(token,proxyHost,proxyPort,proxyUser,proxyPass); appaloosaClient.useLogger(listener.getLogger()); boolean result=true; // Deploy each artifact found for (String filename : fileNames) { File tmpArchive = File.createTempFile("jenkins", "temp-appaloosa-deploy."+FilenameUtils.getExtension(filename)); try { // handle remote slave case so copy binary locally Node buildNode = Hudson.getInstance().getNode(build.getBuiltOnStr()); FilePath tmpLocalFile = new FilePath(tmpArchive); FilePath remoteFile = rootDir.child(filename); remoteFile.copyTo(tmpLocalFile); listener.getLogger().println(Messages.AppaloosaPublisher_deploying(filename)); appaloosaClient.deployFile(tmpArchive.getAbsolutePath()); listener.getLogger().println(Messages.AppaloosaPublisher_deployed()); } catch (Exception e) { listener.getLogger().println(Messages.AppaloosaPublisher_deploymentFailed(e.getMessage())); result=false; } finally { FileUtils.deleteQuietly(tmpArchive); } } return result; } @Override public Collection<? extends Action> getProjectActions(AbstractProject<?, ?> project) { ArrayList<AppaloosaBuildAction> actions = new ArrayList<AppaloosaBuildAction>(); RunList<? extends AbstractBuild<?, ?>> builds = project.getBuilds(); Collection predicated = CollectionUtils.select(builds, new Predicate() { public boolean evaluate(Object o) { Result r = ((AbstractBuild<?, ?>) o).getResult(); return r!=null && r.isBetterOrEqualTo(Result.SUCCESS); } }); ArrayList<AbstractBuild<?, ?>> filteredList = new ArrayList<AbstractBuild<?, ?>>(predicated); Collections.reverse(filteredList); for (AbstractBuild<?, ?> build : filteredList) { List<AppaloosaBuildAction> appaloosaActions = build.getActions(AppaloosaBuildAction.class); if (appaloosaActions != null && appaloosaActions.size() > 0) { for (AppaloosaBuildAction action : appaloosaActions) { actions.add(new AppaloosaBuildAction(action)); } break; } } return actions; } @Extension // This indicates to Jenkins that this is an implementation of an extension point. public static final class DescriptorImpl extends BuildStepDescriptor<Publisher> { public DescriptorImpl() { super(AppaloosaPublisher.class); load(); } public boolean isApplicable(Class<? extends AbstractProject> aClass) { // Indicates that this builder can be used with all kinds of project types return true; } @Override public boolean configure(StaplerRequest req, JSONObject json) throws FormException { // XXX is this now the right style? req.bindJSON(this, json); save(); return true; } /** * Performs on-the-fly validation on the file mask wildcard. */ public FormValidation doCheckFilePattern(@AncestorInPath AbstractProject project, @QueryParameter String value) throws IOException { return FilePath.validateFileMask(project.getSomeWorkspace(),value); } /** * This human readable name is used in the configuration screen. */ public String getDisplayName() { return Messages.AppaloosaPublisher_uploadToAppaloosa(); } } }
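// A minimal sketch of the upload step performed inside AppaloosaPublisher.perform, using only the
// AppaloosaClient calls that appear above (the five-argument constructor, useLogger, deployFile).
// The token and file path are placeholders, and passing null/0 for "no proxy" is an assumption.
package org.jenkins.plugins.appaloosa.example;

import com.appaloosastore.client.AppaloosaClient;

public final class AppaloosaDeploySketch {
  public static void main(String[] args) {
    String token = "YOUR_ORGANISATION_TOKEN"; // assumption: organisation token from the Appaloosa store
    String artifact = "/tmp/app.ipa";         // assumption: a locally available binary to upload
    // Constructor argument order mirrors the publisher: token, proxyHost, proxyPort, proxyUser, proxyPass.
    AppaloosaClient client = new AppaloosaClient(token, null, 0, null, null);
    client.useLogger(System.out);             // the publisher passes the build listener's logger instead
    try {
      client.deployFile(artifact);            // uploads and publishes the binary
      System.out.println("Deployed " + artifact);
    } catch (Exception e) {
      System.err.println("Deployment failed: " + e.getMessage());
    }
  }
}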
package org.mklab.taskit.server.roommap.cell; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * A room map backed by a two-dimensional grid of cells, loaded from CSV data. * * @author Yuhi Ishikura */ public class CellRoomMap { private Cell[][] cells; private Map<String, Cell> userIdToCell = new HashMap<String, Cell>(); /** * Loads a room map from a CSV map file. * * @param file CSV file to read * @return loaded room map * @throws IOException if the file cannot be read */ public static CellRoomMap load(File file) throws IOException { Reader reader = new InputStreamReader(new FileInputStream(file)); try { return load(reader); } catch (IOException ex) { throw ex; } finally { reader.close(); } } /** * Loads a room map from CSV data. * * @param reader reader providing the CSV data * @return loaded room map * @throws IOException if reading fails */ public static CellRoomMap load(Reader reader) throws IOException { List<List<Cell>> cellList = new ArrayList<List<Cell>>(); int maximumColumnCount = 0; final BufferedReader br = new BufferedReader(reader); String line; while ((line = br.readLine()) != null) { line = line.replaceAll(",,", ", ,"); //$NON-NLS-1$ //$NON-NLS-2$ final String[] s = line.split(","); //$NON-NLS-1$ final List<Cell> cellsX = new ArrayList<Cell>(s.length); for (int i = 0; i < s.length; i++) { String cell = s[i].trim(); if (cell.length() == 0) { cellsX.add(Cell.EMPTY_CELL); } else { cellsX.add(new Cell(cell)); } } if (cellsX.size() > maximumColumnCount) maximumColumnCount = cellsX.size(); cellList.add(cellsX); } final Cell[][] cells = new Cell[cellList.size()][maximumColumnCount]; int i = 0; for (List<Cell> row : cellList) { while (row.size() < maximumColumnCount) { row.add(Cell.EMPTY_CELL); } cells[i++] = row.toArray(new Cell[row.size()]); } return new CellRoomMap(cells); } private CellRoomMap(Cell[][] cells) { this.cells = cells; for (Cell[] row : cells) { for (Cell cell : row) { this.userIdToCell.put(cell.getUserId(), cell); } } } /** * Returns the cell at position (x, y). * * @param x x coordinate * @param y y coordinate * @return cell at the given position */ public Cell getCell(int x, int y) { return this.cells[y][x]; } /** * Returns the cell assigned to the given user. * * @param userId user ID * @return matching cell, or null if none exists */ public Cell getCellFor(String userId) { return this.userIdToCell.get(userId); } /** * Returns the number of columns. * * @return column count */ public int getCellCountX() { return this.cells[0].length; } /** * Returns the number of rows. * * @return row count */ public int getCellCountY() { return this.cells.length; } }
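// A small usage sketch for CellRoomMap: builds a map from inline CSV data and looks cells up by
// position and by user id. The user ids are made up; only the public API defined above is used.
package org.mklab.taskit.server.roommap.cell.example;

import java.io.IOException;
import java.io.StringReader;
import org.mklab.taskit.server.roommap.cell.CellRoomMap;

public final class CellRoomMapSketch {
  public static void main(String[] args) throws IOException {
    // Two rows; empty fields become Cell.EMPTY_CELL and shorter rows are padded to the widest row.
    String csv = "alice,bob,carol\n" + "dave,,";
    CellRoomMap map = CellRoomMap.load(new StringReader(csv));
    System.out.println(map.getCellCountX() + " x " + map.getCellCountY()); // 3 x 2
    System.out.println(map.getCell(1, 0));          // cell at x=1, y=0 (bob)
    System.out.println(map.getCellFor("dave"));     // lookup by user id
  }
}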
package org.openlmis.fulfillment.domain; import java.util.UUID; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Table; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.ToString; import org.hibernate.annotations.Type; import org.javers.core.metamodel.annotation.TypeName; @Entity @Table(name = "shipment_line_items") @TypeName("ShipmentLineItem") @AllArgsConstructor @ToString public class ShipmentLineItem extends BaseEntity { @Type(type = UUID_TYPE) @Column(nullable = false) @Getter private UUID orderableId; @Type(type = UUID_TYPE) @Getter private UUID lotId; @Column(nullable = false) @Getter(AccessLevel.PACKAGE) private Long quantityShipped; // Constructor needed by framework. Use all args constructor to create new instance. private ShipmentLineItem() {} public ShipmentLineItem(UUID orderableId, Long quantityShipped) { this(orderableId, null, quantityShipped); } /** * Creates new instance based on data from {@link Importer} * * @param importer instance of {@link Importer} * @return new instance of shipment line item. */ protected static ShipmentLineItem newInstance(Importer importer) { ShipmentLineItem shipmentLineItem = new ShipmentLineItem( importer.getOrderableId(), importer.getLotId(), importer.getQuantityShipped()); shipmentLineItem.setId(importer.getId()); return shipmentLineItem; } /** * Verifies if the given line item has something to be shipped. */ public boolean isShipped() { return null != quantityShipped && quantityShipped > 0; } /** * Exports data from the given shipment to the instance that implement * {@link Exporter} interface. */ public void export(Exporter exporter) { exporter.setId(getId()); exporter.setOrderableId(orderableId); exporter.setLotId(lotId); exporter.setQuantityShipped(quantityShipped); } /** * Returns a copy of line item. */ public ShipmentLineItem copy() { ShipmentLineItem clone = new ShipmentLineItem(orderableId, lotId, quantityShipped); clone.setId(id); return clone; } public interface Exporter { void setId(UUID id); void setOrderableId(UUID orderableId); void setLotId(UUID lotId); void setQuantityShipped(Long quantityShipped); } public interface Importer { UUID getId(); UUID getOrderableId(); UUID getLotId(); Long getQuantityShipped(); } }
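// A minimal sketch of the Exporter side of ShipmentLineItem: a plain DTO that receives the values
// pushed by export(). The UUIDs and quantity are placeholders; only the interfaces and public
// constructor declared above are used.
package org.openlmis.fulfillment.domain.example;

import java.util.UUID;
import org.openlmis.fulfillment.domain.ShipmentLineItem;

public final class ShipmentLineItemDto implements ShipmentLineItem.Exporter {
  private UUID id;
  private UUID orderableId;
  private UUID lotId;
  private Long quantityShipped;

  @Override public void setId(UUID id) { this.id = id; }
  @Override public void setOrderableId(UUID orderableId) { this.orderableId = orderableId; }
  @Override public void setLotId(UUID lotId) { this.lotId = lotId; }
  @Override public void setQuantityShipped(Long quantityShipped) { this.quantityShipped = quantityShipped; }

  public static void main(String[] args) {
    ShipmentLineItem item = new ShipmentLineItem(UUID.randomUUID(), 10L); // orderable id, quantity
    ShipmentLineItemDto dto = new ShipmentLineItemDto();
    item.export(dto);                                   // copies id/orderable/lot/quantity into the DTO
    System.out.println(dto.orderableId + " shipped=" + item.isShipped());
  }
}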
package org.pfaa.fabrica.registration; import java.util.Collections; import java.util.List; import org.pfaa.chemica.ChemicaItems; import org.pfaa.chemica.fluid.IndustrialFluids; import org.pfaa.chemica.item.IndustrialMaterialItem; import org.pfaa.chemica.item.ItemIngredientStack; import org.pfaa.chemica.item.MaterialStack; import org.pfaa.chemica.model.Aggregate.Aggregates; import org.pfaa.chemica.model.Compound.Compounds; import org.pfaa.chemica.model.Condition; import org.pfaa.chemica.model.IndustrialMaterial; import org.pfaa.chemica.model.Reaction; import org.pfaa.chemica.model.State; import org.pfaa.chemica.model.Strength; import org.pfaa.chemica.processing.Form.Forms; import org.pfaa.chemica.registration.BaseRecipeRegistration; import org.pfaa.chemica.registration.IngredientList; import org.pfaa.chemica.registration.RecipeUtils; import org.pfaa.chemica.util.ChanceStack; import org.pfaa.fabrica.FabricaBlocks; import org.pfaa.fabrica.FabricaItems; import org.pfaa.fabrica.model.Generic.Generics; import org.pfaa.fabrica.model.Intermediate.Intermediates; import org.pfaa.geologica.GeologicaItems; import org.pfaa.geologica.processing.IndustrialMineral.IndustrialMinerals; import org.pfaa.geologica.processing.OreMineral.Ores; import org.pfaa.geologica.processing.Solutions; import cpw.mods.fml.common.registry.GameRegistry; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.item.ItemStack; import net.minecraftforge.fluids.FluidRegistry; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.oredict.OreDictionary; public class RecipeRegistration extends BaseRecipeRegistration { public static void init() { grindIntermediates(); grindBricks(); grindSlag(); hydrateHardenedClay(); useFeldsparAsFlux(); calcineMaterials(); makeAsh(); makePortlandCement(); makePozzolanicCement(); makeConcrete(); makeDrywall(); makeDrywallJointCompound(); fillPigments(); makePhosphoricAcid(); makeSodaAsh(); makeHood(); } private static void registerCrushingRecipe(Aggregates aggregate, int nPerBlock) { MaterialStack materialStack = new MaterialStack(Forms.BLOCK, aggregate); ItemStack output = ChemicaItems.AGGREGATE_DUST.getItemStack(aggregate, nPerBlock); for (ItemStack input : materialStack.getItemStacks()) { RECIPES.registerCrushingRecipe(input, output, null, Strength.WEAK); } } private static void hydrateHardenedClay() { registerCrushingRecipe(Aggregates.HARDENED_CLAY, 4); FluidStack water = new FluidStack(FluidRegistry.WATER, IndustrialFluids.getAmount(Forms.PILE)); GENERICS.registerMixingRecipe( new IngredientList(Aggregates.HARDENED_CLAY), water, null, new ItemStack(Items.clay_ball), null, null, Condition.STP, null); } private static void grindIntermediates() { for (Intermediates material : FabricaItems.INTERMEDIATE_LUMP.getIndustrialMaterials()) { ItemStack input = FabricaItems.INTERMEDIATE_LUMP.getItemStack(material); ItemStack output = FabricaItems.INTERMEDIATE_DUST.getItemStack(material); RECIPES.registerGrindingRecipe(input, output, Collections.<ChanceStack>emptyList(), Strength.WEAK); } } private static void grindBricks() { ItemStack output = FabricaItems.INTERMEDIATE_DUST.getItemStack(Intermediates.FIRED_CLAY); RECIPES.registerGrindingRecipe(new ItemStack(Items.brick), output, Collections.<ChanceStack>emptyList(), Strength.MEDIUM); } private static void grindSlag() { ItemStack output = FabricaItems.INTERMEDIATE_DUST.getItemStack(Intermediates.SLAG); GENERICS.registerGrindingRecipe(new MaterialStack(Forms.LUMP, Intermediates.SLAG), output, 
Collections.<ChanceStack>emptyList(), Strength.STRONG); } private static void calcineMaterial(IndustrialMaterial mineral, IndustrialMaterial calcined, int temp) { IngredientList inputs = new IngredientList(new MaterialStack(mineral)); ItemStack output = new MaterialStack(calcined).getBestItemStack(); GENERICS.registerRoastingRecipe(inputs, output, null, temp); } private static void calcineMaterials() { calcineMaterial(IndustrialMinerals.DIATOMITE, Intermediates.CALCINED_DIATOMITE, 1300); calcineMaterial(IndustrialMinerals.KAOLINITE, Intermediates.METAKAOLIN, 1000); calcineMaterial(Intermediates.METAKAOLIN, Intermediates.SPINEL, 1200); calcineMaterial(Intermediates.SPINEL, Intermediates.MULLITE, 1600); calcineMaterial(Ores.GYPSUM, Intermediates.GYPSUM_PLASTER, 600); calcineMaterial(IndustrialMinerals.TRONA, Compounds.Na2CO3, 600); } private static void makePortlandCement() { ItemStack clinker = FabricaItems.INTERMEDIATE_LUMP.getItemStack(Intermediates.PORTLAND_CLINKER, 4); IngredientList inputs = new IngredientList( new MaterialStack(Ores.CALCITE, 3), new MaterialStack(Forms.CLUMP, Aggregates.CLAY)); GENERICS.registerRoastingRecipe(inputs, clinker, null, 1700); mixIntermediate(FabricaItems.INTERMEDIATE_LUMP, Intermediates.PORTLAND_CEMENT); } private static void makePozzolanicCement() { mixIntermediate(FabricaItems.INTERMEDIATE_DUST, Intermediates.POZZOLANIC_CEMENT); } private static void makeConcrete() { /* * TODO: * - Support coal fly ash and silica fume as pozzalans, once we can capture them * - Support geopolymer concrete */ List<ItemStack> concretes = OreDictionary.getOres("concrete"); if (concretes.size() > 0) { ItemStack concrete = concretes.get(0).copy(); concrete.stackSize = 8; IngredientList inputs = new IngredientList( new MaterialStack(null, Aggregates.GRAVEL), new MaterialStack(null, Aggregates.SAND), new MaterialStack(Generics.CEMENT)); FluidStack water = new FluidStack(FluidRegistry.WATER, IndustrialFluids.getAmount(Forms.DUST)); GENERICS.registerMixingRecipe(inputs, water, null, concrete, null, null, Condition.STP, null); } } private static void makeDrywall() { RecipeUtils.addShapedRecipe(new ItemStack(FabricaBlocks.DRYWALL, 8), "pgp", "pwp", "pgp", 'p', Items.paper, 'g', Intermediates.GYPSUM_PLASTER, 'w', Items.water_bucket); } private static void makeDrywallJointCompound() { IngredientList jointCompoundSolids = new IngredientList( new MaterialStack(Forms.DUST, Generics.JOINT_COMPOUND_FILLER), new MaterialStack(Forms.DUST_TINY, Generics.BINDER), new MaterialStack(Forms.DUST_TINY, IndustrialMinerals.PALYGORSKITE), new MaterialStack(Forms.DUST_TINY, Generics.FILLER) ); FluidStack water = new FluidStack(FluidRegistry.WATER, IndustrialFluids.getAmount(Forms.DUST)); GENERICS.registerMixingRecipe(jointCompoundSolids, water, null, new ItemStack(FabricaItems.JOINT_COMPOUND, 4), null, null, Condition.STP, null); } private static void useFeldsparAsFlux() { ItemStack output = new ItemStack(Blocks.glass); ItemStack input = new ItemStack(Blocks.sand); MaterialStack feldspar = new MaterialStack(Forms.DUST_TINY, IndustrialMinerals.FELDSPAR); GENERICS.registerCastingRecipe(input, output, feldspar, 1500); output = new ItemStack(Items.brick); input = new ItemStack(Items.clay_ball); GENERICS.registerCastingRecipe(input, output, feldspar, 1500); } private static void mixIntermediate(IndustrialMaterialItem<Intermediates> item, Intermediates material) { IngredientList inputs = RecipeUtils.getMixtureInputs(item.getForm(), material); GENERICS.registerMixingRecipe(inputs, 
item.getItemStack(material)); } private static void makeAsh() { ItemStack bonemeal = new ItemStack(Items.dye, 1, 15); RECIPES.registerRoastingRecipe(Collections.singletonList(bonemeal), FabricaItems.INTERMEDIATE_DUST.getItemStack(Intermediates.ASH), null, 1000); } private static void fillPigments() { for (ItemStack dye : OreDictionary.getOres("dye")) { ItemStack doubleDye = dye.copy(); doubleDye.stackSize = 2; GENERICS.registerMixingRecipe( new IngredientList(new ItemIngredientStack(dye), new MaterialStack(Generics.FILLER)), doubleDye); } } private static void makeSodaAsh() { float brineConcentration = (float)Solutions.PURIFIED_BRINE.getComponents().get(1).weight; // FIXME: drop Term.concentration, instead define this reaction normally and create a generic // operation that overrides the NaCl with brine. Reaction reaction = Reaction.inWaterOf(Compounds.NaCl, brineConcentration). with(Compounds.CO2). with(Compounds.NH3). with(Compounds.H2O). yields(Compounds.NaHCO3). and(Compounds.NH4Cl); CONVERSIONS.register(reaction); } private static void makePhosphoricAcid() { IngredientList solidInputs = new IngredientList(new MaterialStack(Forms.DUST, IndustrialMinerals.APATITE)); FluidStack sulfuricAcid = IndustrialFluids.getCanonicalFluidStack(Compounds.H2SO4, State.AQUEOUS); FluidStack phosphoricAcid = IndustrialFluids.getCanonicalFluidStack(Compounds.H3PO4, State.AQUEOUS); ItemStack gypsum = GeologicaItems.ORE_MINERAL_DUST.getItemStack(Ores.GYPSUM); GENERICS.registerMixingRecipe(solidInputs, sulfuricAcid, null, gypsum, phosphoricAcid, null, Condition.STP, null); } private static void makeHood() { GameRegistry.addShapedRecipe(new ItemStack(FabricaBlocks.HOOD), "sss", "sts", "sgs", 's', Blocks.cobblestone, 't', Blocks.trapdoor, 'g', Blocks.glass); } }
package org.pharmgkb.pharmcat.reporter.io; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import com.google.common.base.Preconditions; import com.google.gson.Gson; import org.pharmgkb.pharmcat.haplotype.NamedAlleleMatcher; import org.pharmgkb.pharmcat.haplotype.ResultSerializer; import org.pharmgkb.pharmcat.haplotype.model.GeneCall; import org.pharmgkb.pharmcat.haplotype.model.Result; import org.pharmgkb.pharmcat.reporter.model.DosingGuideline; import org.pharmgkb.pharmcat.reporter.model.GuidelinePackage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class takes JSON files and deserializes them into Objects through GSON. */ public class JsonFileLoader { private static final Logger sf_logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String CPIC_SOURCE = "CPIC"; private final Gson gson = new Gson(); /** * Load all the gene calls coming from the {@link NamedAlleleMatcher} utility */ public List<GeneCall> loadHaplotypeGeneCalls(@Nonnull Path haplotypeCalledFile) throws IOException{ Preconditions.checkNotNull(haplotypeCalledFile); Preconditions.checkArgument(Files.exists(haplotypeCalledFile)); Preconditions.checkArgument(Files.isRegularFile(haplotypeCalledFile)); sf_logger.debug("Loading haplotyper file {}", haplotypeCalledFile); Result namResult = new ResultSerializer().fromJson(haplotypeCalledFile); return namResult.getGeneCalls(); } /** * Load the <strong>CPIC</strong> guideline annotations into {@link DosingGuideline} objects from the list of guideline {@link File} * list */ public List<GuidelinePackage> loadGuidelines(List<Path> guidelineFileList) throws IOException { List<GuidelinePackage> guidelines = new ArrayList<>(); for (Path guidelineFile : guidelineFileList) { try (BufferedReader br = Files.newBufferedReader(guidelineFile)) { GuidelinePackage guidelinePackage = gson.fromJson(br, GuidelinePackage.class); DosingGuideline guideline = guidelinePackage.getGuideline(); if (guideline.getSource().equals(CPIC_SOURCE) && guideline.isRecommendation()) { guidelines.add(guidelinePackage); } } } return guidelines; } }
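// A short usage sketch for JsonFileLoader: load the NamedAlleleMatcher output and the CPIC guideline
// annotation files from disk. The paths are hypothetical placeholders; only the two public methods
// defined above are used.
package org.pharmgkb.pharmcat.reporter.io.example;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import org.pharmgkb.pharmcat.haplotype.model.GeneCall;
import org.pharmgkb.pharmcat.reporter.io.JsonFileLoader;
import org.pharmgkb.pharmcat.reporter.model.GuidelinePackage;

public final class JsonFileLoaderSketch {
  public static void main(String[] args) throws Exception {
    JsonFileLoader loader = new JsonFileLoader();
    Path calls = Paths.get("out/sample.haplotyper.json");            // hypothetical matcher output
    List<Path> guidelineFiles = Arrays.asList(
        Paths.get("guidelines/cpic_guideline_1.json"));              // hypothetical annotation file
    List<GeneCall> geneCalls = loader.loadHaplotypeGeneCalls(calls);
    List<GuidelinePackage> guidelines = loader.loadGuidelines(guidelineFiles);
    System.out.println(geneCalls.size() + " gene calls, " + guidelines.size() + " CPIC guidelines");
  }
}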
package org.psjava.algo.graph.shortestpath; import java.util.LinkedList; import org.psjava.ds.graph.DirectedWeightedEdge; import org.psjava.ds.graph.DirectedWeightedGraph; import org.psjava.ds.map.MutableMap; import org.psjava.goods.GoodMutableMapFactory; import org.psjava.javautil.AssertStatus; import org.psjava.javautil.Pair; import org.psjava.math.ns.AddableNumberSystem; public class FloydWarshall implements AllPairShortestPath { private static class Status<W> { W distance = null; Object next = null; DirectedWeightedEdge<W> directEdge = null; } @Override public <W> AllPairShortestPathResult<W> calc(DirectedWeightedGraph<W> graph, AddableNumberSystem<W> ns) { MutableMap<Pair<Object, Object>, Status<W>> status = GoodMutableMapFactory.getInstance().create(); for (Object v1 : graph.getVertices()) for (Object v2 : graph.getVertices()) status.put(Pair.create(v1, v2), new Status<W>()); for (Object v : graph.getVertices()) status.get(Pair.create(v, v)).distance = ns.getZero(); for (DirectedWeightedEdge<W> edge : graph.getEdges()) { Status<W> s = status.get(Pair.create(edge.from(), edge.to())); if (s.distance == null || ns.compare(s.distance, edge.weight()) > 0) { s.distance = edge.weight(); s.directEdge = edge; } } for (Object k : graph.getVertices()) for (Object i : graph.getVertices()) for (Object j : graph.getVertices()) { Status<W> i2k = status.get(Pair.create(i, k)); Status<W> k2j = status.get(Pair.create(k, j)); if (i2k.distance != null && k2j.distance != null) { W newd = ns.add(i2k.distance, k2j.distance); Status<W> s = status.get(Pair.create(i, j)); if (s.distance == null || ns.compare(s.distance, newd) > 0) { s.distance = newd; s.next = k; } } } for (Object k : graph.getVertices()) AssertStatus.assertTrue(!ns.isNegative(status.get(Pair.create(k, k)).distance), "contains negative cycle"); return createResult(status); } private <W> AllPairShortestPathResult<W> createResult(final MutableMap<Pair<Object, Object>, Status<W>> status) { return new AllPairShortestPathResult<W>() { @Override public Iterable<DirectedWeightedEdge<W>> getPath(Object from, Object to) { assertReachable(from, to); LinkedList<DirectedWeightedEdge<W>> list = new LinkedList<DirectedWeightedEdge<W>>(); getPathRecursively(list, from, to); return list; } private void getPathRecursively(LinkedList<DirectedWeightedEdge<W>> list, Object from, Object to) { if (!from.equals(to)) { Status<W> s = status.get(Pair.create(from, to)); if (s.next == null) { list.add(s.directEdge); } else { getPathRecursively(list, from, s.next); getPathRecursively(list, s.next, to); } } } @Override public W getDistance(Object from, Object to) { assertReachable(from, to); return status.get(Pair.create(from, to)).distance; } private void assertReachable(Object from, Object to) { AssertStatus.assertTrue(isReachable(from, to), "not reachable"); } @Override public boolean isReachable(Object from, Object to) { Status<W> s = status.get(Pair.create(from, to), null); AssertStatus.assertTrue(s != null, "not valid vertex"); return s.distance != null; } }; } }
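// The class above plugs Floyd–Warshall into psjava's graph and number-system abstractions. For
// reference, this is the same triple-loop relaxation on a plain adjacency matrix (an independent
// sketch, not psjava's API); INF marks "no edge" and dist[i][j] ends up as the all-pairs shortest
// distance, matching the role of Status.distance above.
public final class FloydWarshallMatrixSketch {
  public static void main(String[] args) {
    final long INF = Long.MAX_VALUE / 4;           // large enough that INF + INF does not overflow
    long[][] dist = {
        {0,   3,   INF, 7},
        {8,   0,   2,   INF},
        {5,   INF, 0,   1},
        {2,   INF, INF, 0},
    };
    int n = dist.length;
    for (int k = 0; k < n; k++)                    // intermediate vertex, same role as 'k' above
      for (int i = 0; i < n; i++)
        for (int j = 0; j < n; j++)
          if (dist[i][k] + dist[k][j] < dist[i][j])
            dist[i][j] = dist[i][k] + dist[k][j];
    System.out.println(dist[0][2]);                // 5: path 0 -> 1 -> 2
    System.out.println(dist[3][1]);                // 5: path 3 -> 0 -> 1
  }
}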
package org.radarcns.management.web.rest; import com.codahale.metrics.annotation.Timed; import io.github.jhipster.web.util.ResponseUtil; import org.radarcns.management.domain.Subject; import org.radarcns.management.repository.SubjectRepository; import org.radarcns.management.security.AuthoritiesConstants; import org.radarcns.management.security.SecurityUtils; import org.radarcns.management.service.SourceService; import org.radarcns.management.service.SubjectService; import org.radarcns.management.service.dto.SourceDTO; import org.radarcns.management.service.dto.SubjectDTO; import org.radarcns.management.service.mapper.SourceMapper; import org.radarcns.management.service.mapper.SubjectMapper; import org.radarcns.management.web.rest.util.HeaderUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.ResponseEntity; import org.springframework.security.access.AccessDeniedException; import org.springframework.security.access.annotation.Secured; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; /** * REST controller for managing Subject. */ @RestController @RequestMapping("/api") public class SubjectResource { private final Logger log = LoggerFactory.getLogger(SubjectResource.class); private static final String ENTITY_NAME = "subject"; @Autowired private SubjectService subjectService; @Autowired private SubjectRepository subjectRepository; @Autowired private SubjectMapper subjectMapper; @Autowired private SourceService sourceService; @Autowired private SourceMapper sourceMapper; /** * POST /subjects : Create a new subject. 
* * @param subjectDTO the subjectDTO to create * @return the ResponseEntity with status 201 (Created) and with body the new subjectDTO, or with status 400 (Bad Request) if the subject has already an ID * @throws URISyntaxException if the Location URI syntax is incorrect */ @PostMapping("/subjects") @Timed @Secured({AuthoritiesConstants.SYS_ADMIN, AuthoritiesConstants.PROJECT_ADMIN , AuthoritiesConstants.EXTERNAL_ERF_INTEGRATOR}) public ResponseEntity<SubjectDTO> createSubject(@RequestBody SubjectDTO subjectDTO) throws URISyntaxException, IllegalAccessException { log.debug("REST request to save Subject : {}", subjectDTO); if (subjectDTO.getId() != null) { return ResponseEntity.badRequest().headers(HeaderUtil.createFailureAlert(ENTITY_NAME, "idexists", "A new subject cannot already have an ID")).body(null); } if (subjectDTO.getLogin() == null) { return ResponseEntity.badRequest().headers(HeaderUtil.createFailureAlert(ENTITY_NAME, "loginrequired", "A subject login is required")).body(null); } if (subjectDTO.getEmail() == null) { return ResponseEntity.badRequest().headers(HeaderUtil.createFailureAlert(ENTITY_NAME, "patientEmailRequired", "A subject email is required")).body(null); } if (subjectDTO.getProject().getId() == null) { return ResponseEntity.badRequest().headers(HeaderUtil.createFailureAlert(ENTITY_NAME, "projectrequired", "A subject should be assigned to a project")).body(null); } if (subjectDTO.getExternalId() != null && !subjectDTO.getExternalId().isEmpty() && subjectRepository.findOneByProjectIdAndExternalId(subjectDTO.getProject().getId() , subjectDTO.getExternalId()).isPresent()) { return ResponseEntity.badRequest().headers(HeaderUtil .createFailureAlert(ENTITY_NAME, "subjectExists", "A subject with given project-id and external-id already exists")).body(null); } SubjectDTO result = subjectService.createSubject(subjectDTO); return ResponseEntity.created(new URI("/api/subjects/" + result.getId())) .headers(HeaderUtil.createEntityCreationAlert(ENTITY_NAME, result.getId().toString())) .body(result); } /** * PUT /subjects : Updates an existing subject. * * @param subjectDTO the subjectDTO to update * @return the ResponseEntity with status 200 (OK) and with body the updated subjectDTO, * or with status 400 (Bad Request) if the subjectDTO is not valid, * or with status 500 (Internal Server Error) if the subjectDTO couldnt be updated * @throws URISyntaxException if the Location URI syntax is incorrect */ @PutMapping("/subjects") @Timed @Secured({AuthoritiesConstants.SYS_ADMIN, AuthoritiesConstants.PROJECT_ADMIN , AuthoritiesConstants.EXTERNAL_ERF_INTEGRATOR}) public ResponseEntity<SubjectDTO> updateSubject(@RequestBody SubjectDTO subjectDTO) throws URISyntaxException, IllegalAccessException { log.debug("REST request to update Subject : {}", subjectDTO); if (subjectDTO.getId() == null) { return createSubject(subjectDTO); } SubjectDTO result = subjectService.updateSubject(subjectDTO); return ResponseEntity.ok() .headers(HeaderUtil.createEntityUpdateAlert(ENTITY_NAME, subjectDTO.getId().toString())) .body(result); } /** * GET /subjects : get all the subjects. 
* * @return the ResponseEntity with status 200 (OK) and the list of subjects in body */ @GetMapping("/subjects") @Timed public ResponseEntity<List<SubjectDTO>> getAllSubjects( @RequestParam(value = "projectId" , required = false) Long projectId, @RequestParam(value = "externalId" , required = false) String externalId) { log.error("ProjectID {} and external {}" , projectId, externalId); if(projectId!=null && externalId!=null) { Subject subject = subjectRepository.findOneByProjectIdAndExternalId(projectId, externalId).get(); SubjectDTO subjectDTO = subjectMapper.subjectToSubjectDTO(subject); return ResponseUtil.wrapOrNotFound(Optional.of(Collections.singletonList(subjectDTO))); } else if (projectId==null && externalId!=null) { List<Subject> subjects = subjectRepository.findAllByExternalId(externalId); return ResponseUtil.wrapOrNotFound(Optional.of(subjectMapper.subjectsToSubjectDTOs(subjects))); } else if( projectId!=null) { List<Subject> subjects = subjectRepository.findAllByProjectId(projectId); return ResponseUtil.wrapOrNotFound(Optional.of(subjectMapper.subjectsToSubjectDTOs(subjects))); } log.debug("REST request to get all Subjects"); return ResponseEntity.ok(subjectService.findAll()); } /** * GET /subjects/:id : get the "id" subject. * * @param id the id of the subjectDTO to retrieve * @return the ResponseEntity with status 200 (OK) and with body the subjectDTO, or with status 404 (Not Found) */ @GetMapping("/subjects/{id}") @Timed public ResponseEntity<SubjectDTO> getSubject(@PathVariable Long id) { log.debug("REST request to get Subject : {}", id); Subject subject = subjectRepository.findOneWithEagerRelationships(id); SubjectDTO subjectDTO = subjectMapper.subjectToSubjectDTO(subject); return ResponseUtil.wrapOrNotFound(Optional.ofNullable(subjectDTO)); } /** * DELETE /subjects/:id : delete the "id" subject. * * @param id the id of the subjectDTO to delete * @return the ResponseEntity with status 200 (OK) */ @DeleteMapping("/subjects/{id}") @Timed public ResponseEntity<Void> deleteSubject(@PathVariable Long id) { log.debug("REST request to delete Subject : {}", id); subjectRepository.delete(id); return ResponseEntity.ok().headers(HeaderUtil.createEntityDeletionAlert(ENTITY_NAME, id.toString())).build(); } /** * POST /subjects/sources: Assign a list of sources to the currently logged in user * * The request body should contain a list of sources to be assigned to the currently logged in * user. If the currently authenticated user is not a subject, or not a user * (e.g. client_credentials), an AccessDeniedException will be thrown. At minimum, each source * should define it's device type, like so: <code>[{"deviceType": { "id": 3 }}]</code>. A * source name and source ID will be automatically generated. The source ID will be a new random * UUID, and the source name will be the device model, appended with a dash and the first six * characters of the UUID. The sources will be created and assigned to the currently logged in * user. * * If you need to assign existing sources, simply specify either of id, sourceId, or sourceName * in the source object. 
* * @param sourceDTOS List of sources to assign * @return The updated Subject information */ @PostMapping("/subjects/sources") @Timed public ResponseEntity<SubjectDTO> assignSources(@RequestBody List<SourceDTO> sourceDTOS) { // find out if authenticated user is really a user String currentUser = SecurityUtils.getCurrentUserLogin(); if (currentUser == null) { throw new AccessDeniedException("Only a logged in user can assign sources this way"); } // find out if the login user is really a subject Subject subject = subjectRepository.findBySubjectLogin(currentUser); if (subject == null) { throw new AccessDeniedException("Only users that are subjects can be assigned sources"); } subject = subjectRepository.findOneWithEagerRelationships(subject.getId()); SubjectDTO result = subjectService.assignSourcesToSubject(subject, sourceDTOS); return ResponseEntity.ok().headers(HeaderUtil.createEntityUpdateAlert( ENTITY_NAME, subject.getId().toString())).body(result); } /** * GET /subjects/sources: Get the sources of the currently logged in user. * * @return The list of sources assigned to the currently logged in user */ @GetMapping("/subjects/sources") @Timed public ResponseEntity<List<SourceDTO>> getSources() { // find out if authenticated user is really a user String currentUser = SecurityUtils.getCurrentUserLogin(); if (currentUser == null) { throw new AccessDeniedException("Only a logged in user can get sources this way"); } // find out if the login user is really a subject Subject subject = subjectRepository.findBySubjectLogin(currentUser); if (subject == null) { throw new AccessDeniedException("Only users that are subjects can be assigned sources"); } subject = subjectRepository.findOneWithEagerRelationships(subject.getId()); List<SourceDTO> result = sourceMapper.sourcesToSourceDTOs(new ArrayList<>(subject.getSources())); return ResponseEntity.ok(result); } }
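// A client-side sketch of the "assign sources" call documented above: POST /api/subjects/sources with
// a minimal body that only names a device type, so the server generates the source id and name. The
// base URL and bearer token are placeholders; the device-type id 3 is taken from the endpoint's
// Javadoc example. Uses the JDK 11+ java.net.http client.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public final class AssignSourcesSketch {
  public static void main(String[] args) throws Exception {
    String json = "[{\"deviceType\": { \"id\": 3 }}]";   // body format shown in the endpoint's Javadoc
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/api/subjects/sources")) // hypothetical server address
        .header("Authorization", "Bearer <subject-access-token>")      // the caller must be a subject user
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(json))
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode());           // 200 with the updated SubjectDTO on success
    System.out.println(response.body());
  }
}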
package org.sagebionetworks.web.client.view; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.sagebionetworks.repo.model.UserProfile; import org.sagebionetworks.repo.model.questionnaire.MultichoiceAnswer; import org.sagebionetworks.repo.model.questionnaire.MultichoiceQuestion; import org.sagebionetworks.repo.model.questionnaire.Question; import org.sagebionetworks.web.client.DisplayUtils; import org.sagebionetworks.web.client.IconsImageBundle; import org.sagebionetworks.web.client.SageImageBundle; import org.sagebionetworks.web.client.place.Help; import org.sagebionetworks.web.client.widget.entity.download.CertificateWidget; import org.sagebionetworks.web.client.widget.footer.Footer; import org.sagebionetworks.web.client.widget.header.Header; import org.sagebionetworks.web.client.widget.login.LoginWidget; import org.sagebionetworks.web.shared.WebConstants; import com.extjs.gxt.ui.client.widget.Window; import com.google.gwt.dom.client.DivElement; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.safehtml.shared.SimpleHtmlSanitizer; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.HTMLPanel; import com.google.gwt.user.client.ui.RadioButton; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; public class QuizViewImpl extends Composite implements QuizView { @UiField SimplePanel header; @UiField SimplePanel footer; @UiField HTMLPanel quizContainer; @UiField DivElement quizHighlightBox; @UiField FlowPanel testContainer; @UiField Button submitButton; @UiField SimplePanel successContainer; @UiField HTMLPanel failureContainer; @UiField Button tutorialButton; private Presenter presenter; private IconsImageBundle iconsImageBundle; private SageImageBundle sageImageBundle; private CertificateWidget certificateWidget; private Window loadingWindow; private Header headerWidget; private Footer footerWidget; public interface Binder extends UiBinder<Widget, QuizViewImpl> {} boolean isSubmitInitialized; Map<Long, List<Long>> questionIndex2AnswerIndices; @Inject public QuizViewImpl(Binder uiBinder, IconsImageBundle icons, Header headerWidget, Footer footerWidget, SageImageBundle sageImageBundle, LoginWidget loginWidget, CertificateWidget certificateWidget) { initWidget(uiBinder.createAndBindUi(this)); this.iconsImageBundle = icons; this.sageImageBundle = sageImageBundle; this.headerWidget = headerWidget; this.footerWidget = footerWidget; this.certificateWidget = certificateWidget; headerWidget.configure(false); header.add(headerWidget.asWidget()); footer.add(footerWidget.asWidget()); successContainer.setWidget(certificateWidget.asWidget()); isSubmitInitialized = false; questionIndex2AnswerIndices = new HashMap<Long, List<Long>>(); tutorialButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { presenter.goTo(new Help(WebConstants.USER_CERTIFICATION_TUTORIAL)); } }); } @Override public void setPresenter(Presenter loginPresenter) { this.presenter = loginPresenter; header.clear(); headerWidget.configure(false); header.add(headerWidget.asWidget()); footer.clear(); footer.add(footerWidget.asWidget()); 
com.google.gwt.user.client.Window.scrollTo(0, 0); // scroll user to top of page } @Override public void showErrorMessage(String message) { DisplayUtils.showErrorMessage(message); } @Override public void showLoading() { if(loadingWindow == null) { loadingWindow = DisplayUtils.createLoadingWindow(sageImageBundle, ""); } loadingWindow.show(); } @Override public void showInfo(String title, String message) { DisplayUtils.showInfo(title, message); } @Override public void clear() { hideAll(); testContainer.clear(); questionIndex2AnswerIndices.clear(); hideLoading(); } @Override public void showQuiz(String quizHeader, List<Question> quiz) { hideAll(); quizHighlightBox.setAttribute("title", quizHeader); //clear old questions clear(); int questionNumber = 1; for (Question question : quiz) { testContainer.add(addQuestion(questionNumber++, question)); } //initialize if necessary if (!isSubmitInitialized) { isSubmitInitialized = true; submitButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { //gather answers and pass them back to the presenter presenter.submitAnswers(questionIndex2AnswerIndices); } }); } quizContainer.setVisible(true); } @Override public void showSuccess(UserProfile profile) { hideAll(); certificateWidget.setProfile(profile); successContainer.setVisible(true); } @Override public void showFailure() { hideAll(); failureContainer.setVisible(true); } private FlowPanel addQuestion(int questionNumber, Question question) { FlowPanel questionContainer = new FlowPanel(); if (question instanceof MultichoiceQuestion) { final MultichoiceQuestion multichoiceQuestion = (MultichoiceQuestion)question; questionContainer.addStyleName("margin-bottom-40 margin-left-15"); questionContainer.add(new HTMLPanel("<h5 class=\"inline-block\"><small>"+questionNumber+". 
</small>"+SimpleHtmlSanitizer.sanitizeHtml(question.getPrompt()).asString()+"</small></h5>")); //now add possible answers boolean isRadioButton = multichoiceQuestion.getExclusive(); if (isRadioButton) { for (final MultichoiceAnswer answer : multichoiceQuestion.getAnswers()) { SimplePanel answerContainer = new SimplePanel(); answerContainer.addStyleName("radio margin-left-15"); RadioButton answerButton = new RadioButton("question-"+question.getQuestionIndex()); answerButton.setHTML(SimpleHtmlSanitizer.sanitizeHtml(answer.getPrompt())); answerButton.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { List<Long> answers = getAnswerIndexes(multichoiceQuestion.getQuestionIndex()); answers.clear(); answers.add(answer.getAnswerIndex()); } }); answerContainer.add(answerButton); questionContainer.add(answerContainer); } } else { //checkbox for (final MultichoiceAnswer answer : multichoiceQuestion.getAnswers()) { SimplePanel answerContainer = new SimplePanel(); answerContainer.addStyleName("checkbox margin-left-15"); final CheckBox checkbox= new CheckBox(); checkbox.setHTML(SimpleHtmlSanitizer.sanitizeHtml(answer.getPrompt())); checkbox.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { //not exclusive, include all possible answer indexes List<Long> answers = getAnswerIndexes(multichoiceQuestion.getQuestionIndex()); if (checkbox.getValue()) { if (!answers.contains(answer.getAnswerIndex())) answers.add(answer.getAnswerIndex()); } else { answers.remove(answer.getAnswerIndex()); } } }); answerContainer.add(checkbox); questionContainer.add(answerContainer); } } } return questionContainer; } private List<Long> getAnswerIndexes(Long questionIndex) { List<Long> answers = questionIndex2AnswerIndices.get(questionIndex); if (answers == null) { answers = new ArrayList<Long>(); questionIndex2AnswerIndices.put(questionIndex, answers); } return answers; } private void hideAll() { quizContainer.setVisible(false); successContainer.setVisible(false); failureContainer.setVisible(false); } @Override public void hideLoading() { if(loadingWindow != null) loadingWindow.hide(); } }
package ifc.style; import lib.MultiPropertyTest; /** * Testing <code>com.sun.star.style.ParagraphPropertiesAsian</code> * service properties : * <ul> * <li><code> ParaIsHangingPunctuation </code></li> * <li><code> ParaIsCharacterDistance </code></li> * <li><code> ParaIsForbiddenRules </code></li> * </ul> <p> * Properties testing is automated by <code>lib.MultiPropertyTest</code>. * @see com.sun.star.style.ParagraphProperties */ public class _ParagraphPropertiesAsian extends MultiPropertyTest { /** * Custom tester for properties which have <code>boolean</code> type * and can be void, so if they have a void value, the new value must * be specified. Switches between true and false. */ protected PropertyTester BooleanTester = new PropertyTester() { protected Object getNewValue(String propName, Object oldValue) { if ((oldValue != null) && (oldValue.equals(Boolean.FALSE))) { return Boolean.TRUE; } else { return Boolean.FALSE; } } }; /** * Tested with custom property tester. */ public void _ParaIsHangingPunctuation() { log.println("Testing with custom Property tester"); testProperty("ParaIsHangingPunctuation", BooleanTester); } /** * Tested with custom property tester. */ public void _ParaIsCharacterDistance() { log.println("Testing with custom Property tester"); testProperty("ParaIsCharacterDistance", BooleanTester); } /** * Tested with custom property tester. */ public void _ParaIsForbiddenRules() { log.println("Testing with custom Property tester"); testProperty("ParaIsForbiddenRules", BooleanTester); } } // finish class _ParagraphPropertiesAsian
package com.mysema.query; import static com.mysema.query.Target.H2; import static com.mysema.query.Target.MYSQL; import static com.mysema.query.Target.POSTGRES; import static com.mysema.query.Target.SQLSERVER; import static com.mysema.query.Target.TERADATA; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.List; import org.geolatte.geom.Geometry; import org.geolatte.geom.LineString; import org.geolatte.geom.MultiLineString; import org.geolatte.geom.MultiPoint; import org.geolatte.geom.MultiPolygon; import org.geolatte.geom.Point; import org.geolatte.geom.Polygon; import org.geolatte.geom.codec.Wkt; import org.junit.Test; import com.google.common.collect.Lists; import com.mysema.query.spatial.PointExpression; import com.mysema.query.spatial.path.LineStringPath; import com.mysema.query.spatial.path.MultiLineStringPath; import com.mysema.query.spatial.path.MultiPointPath; import com.mysema.query.spatial.path.MultiPolygonPath; import com.mysema.query.spatial.path.PointPath; import com.mysema.query.spatial.path.PolygonPath; import com.mysema.query.sql.spatial.QShapes; import com.mysema.query.sql.spatial.QSpatialRefSys; import com.mysema.query.sql.spatial.Shapes; import com.mysema.query.types.ConstantImpl; import com.mysema.query.types.Expression; import com.mysema.testutil.ExcludeIn; import com.mysema.testutil.IncludeIn; public class SpatialBase extends AbstractBaseTest { private static final QShapes shapes = QShapes.shapes; // point 1-5 // linestring 6-7 // polygon 8-9 // multipoint 10-11 // multilinestring 12-13 // multipolygon 14-15 private TestQuery withPoints() { return query().from(shapes).where(shapes.id.between(1, 5)); } private TestQuery withLineStrings() { return query().from(shapes).where(shapes.id.between(6, 7)); } private TestQuery withPolygons() { return query().from(shapes).where(shapes.id.between(8, 9)); } private TestQuery withMultipoints() { return query().from(shapes).where(shapes.id.between(10, 11)); } private TestQuery withMultiLineStrings() { return query().from(shapes).where(shapes.id.between(12, 13)); } private TestQuery withMultiPolygons() { return query().from(shapes).where(shapes.id.between(14, 15)); } @Test @IncludeIn(POSTGRES) public void SpatialRefSys() { QSpatialRefSys spatialRefSys = QSpatialRefSys.spatialRefSys; query().from(spatialRefSys).list(spatialRefSys); } @Test // FIXME, maybe use enum as the type ?!? 
@ExcludeIn(H2) public void GeometryType() { List<Tuple> results = query().from(shapes).list(shapes.geometry, shapes.geometry.geometryType()); assertFalse(results.isEmpty()); for (Tuple row : results) { assertEquals( row.get(shapes.geometry).getGeometryType().name(), row.get(shapes.geometry.geometryType())); } } @Test public void AsText() { List<Tuple> results = query().from(shapes).list(shapes.geometry, shapes.geometry.asText()); assertFalse(results.isEmpty()); for (Tuple row : results) { if (!(row.get(shapes.geometry) instanceof MultiPoint)) { assertEquals( row.get(shapes.geometry).asText().replace(" ", ""), row.get(shapes.geometry.asText()).replace(" ", "")); } } } @Test @ExcludeIn(H2) public void Point_X_Y() { PointPath<Point> point = shapes.geometry.asPoint(); List<Tuple> results = withPoints().list(point, point.x(), point.y()); assertFalse(results.isEmpty()); for (Tuple row : results) { assertEquals(Double.valueOf(row.get(point).getX()), row.get(point.x())); assertEquals(Double.valueOf(row.get(point).getY()), row.get(point.y())); } } @Test @ExcludeIn(MYSQL) public void Point_Distance() { QShapes shapes1 = QShapes.shapes; QShapes shapes2 = new QShapes("shapes2"); for (Tuple tuple : query().from(shapes1, shapes2) .where(shapes1.id.loe(5), shapes2.id.loe(5)) .list(shapes1.geometry.asPoint(), shapes2.geometry.asPoint(), shapes1.geometry.distance(shapes2.geometry))) { Point point1 = tuple.get(shapes1.geometry.asPoint()); Point point2 = tuple.get(shapes2.geometry.asPoint()); Double distance = tuple.get(shapes1.geometry.distance(shapes2.geometry)); assertEquals(point1.distance(point2), distance.doubleValue(), 0.0001); } } @Test public void Point_Instances() { List<Shapes> results = withPoints().list(shapes); assertEquals(5, results.size()); for (Shapes row : results) { assertNotNull(row.getId()); assertNotNull(row.getGeometry()); assertTrue(row.getGeometry() instanceof Point); } } @Test public void LineString_Instances() { List<Geometry> results = withLineStrings().list(shapes.geometry); assertFalse(results.isEmpty()); for (Geometry row : results) { assertNotNull(row); assertTrue(row instanceof LineString); } } @Test public void Polygon_Instances() { List<Geometry> results = withPolygons().list(shapes.geometry); assertFalse(results.isEmpty()); for (Geometry row : results) { assertNotNull(row); assertTrue(row instanceof Polygon); } } @Test public void MultiPoint_Instances() { List<Geometry> results = withMultipoints().list(shapes.geometry); assertFalse(results.isEmpty()); for (Geometry row : results) { assertNotNull(row); assertTrue(row instanceof MultiPoint); } } @Test public void MultiLineString_Instances() { List<Geometry> results = withMultiLineStrings().list(shapes.geometry); assertFalse(results.isEmpty()); for (Geometry row : results) { assertNotNull(row); assertTrue(row instanceof MultiLineString); } } @Test public void MultiPolygon_Instances() { List<Geometry> results = withMultiPolygons().list(shapes.geometry); assertFalse(results.isEmpty()); for (Geometry row : results) { assertNotNull(row); assertTrue(row instanceof MultiPolygon); } } @Test public void Point_Methods() { PointPath<Point> point = shapes.geometry.asPoint(); List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, point.asBinary(), H2); add(expressions, point.asText()); add(expressions, point.boundary(), MYSQL); add(expressions, point.convexHull(), MYSQL); add(expressions, point.dimension()); add(expressions, point.envelope(), H2); add(expressions, point.geometryType(), H2); add(expressions, 
point.isEmpty()); add(expressions, point.isSimple()); add(expressions, point.m(), MYSQL, TERADATA, H2); add(expressions, point.srid()); // point specific add(expressions, point.x(), H2); add(expressions, point.y(), H2); add(expressions, point.z(), MYSQL, TERADATA, H2); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : withPoints().list(expr)) { if (row == null && !logged) { System.err.println(expr.toString()); logged = true; } } } } private List<Expression<?>> createExpressions(PointExpression<Point> point1, Expression<Point> point2) { List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, point1.contains(point2)); add(expressions, point1.crosses(point2)); add(expressions, point1.difference(point2), MYSQL); add(expressions, point1.disjoint(point2)); add(expressions, point1.distance(point2), MYSQL); add(expressions, point1.distanceSphere(point2), H2, MYSQL, SQLSERVER); add(expressions, point1.distanceSpheroid(point2), H2, MYSQL, POSTGRES, SQLSERVER); add(expressions, point1.eq(point2)); add(expressions, point1.intersection(point2), MYSQL); add(expressions, point1.intersects(point2)); add(expressions, point1.overlaps(point2)); add(expressions, point1.symDifference(point2), MYSQL); add(expressions, point1.touches(point2)); add(expressions, point1.union(point2), MYSQL); add(expressions, point1.within(point2)); return expressions; } @Test public void Point_Methods2() { QShapes shapes1 = QShapes.shapes; QShapes shapes2 = new QShapes("shapes2"); List<Expression<?>> expressions = Lists.newArrayList(); expressions.addAll(createExpressions(shapes1.geometry.asPoint(), shapes2.geometry.asPoint())); expressions.addAll(createExpressions(shapes1.geometry.asPoint(), ConstantImpl.create((Point)Wkt.fromWkt("Point(2 2)")))); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : query().from(shapes1, shapes2) .where(shapes1.id.loe(5), shapes2.id.loe(5)).list(expr)) { if (row == null && !logged) { System.err.println(expr.toString()); logged = true; } } } } @Test public void LineString_Methods() { LineStringPath<LineString> lineString = shapes.geometry.asLineString(); List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, lineString.asBinary(), H2); add(expressions, lineString.asText()); add(expressions, lineString.boundary(), MYSQL); add(expressions, lineString.convexHull(), MYSQL); add(expressions, lineString.dimension()); add(expressions, lineString.envelope(), H2); add(expressions, lineString.geometryType(), H2); add(expressions, lineString.isEmpty()); add(expressions, lineString.isSimple()); // curve specific add(expressions, lineString.length(), H2); add(expressions, lineString.startPoint(), H2); add(expressions, lineString.endPoint(), H2); add(expressions, lineString.isClosed(), H2); add(expressions, lineString.isRing(), H2, MYSQL); // linestring specific add(expressions, lineString.numPoints(), H2); add(expressions, lineString.pointN(1), H2); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : withLineStrings().list(expr)) { if (row == null && !logged) { System.err.println(expr.toString()); logged = true; } } } } @Test public void Polygon_Methods() { PolygonPath<Polygon> polygon = shapes.geometry.asPolygon(); List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, polygon.asBinary(), H2); add(expressions, polygon.asText()); add(expressions, polygon.boundary(), MYSQL); add(expressions, polygon.convexHull(), MYSQL); add(expressions, polygon.dimension()); 
add(expressions, polygon.envelope(), H2); add(expressions, polygon.geometryType(), H2); add(expressions, polygon.isEmpty()); add(expressions, polygon.isSimple()); // surface specific add(expressions, polygon.area()); add(expressions, polygon.centroid()); add(expressions, polygon.pointOnSurface(), H2, MYSQL); // polygon specific add(expressions, polygon.exterorRing(), H2); add(expressions, polygon.numInteriorRing(), H2); add(expressions, polygon.interiorRingN(1), H2); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : withPolygons().list(expr)) { if (row == null && !logged) { System.err.println(expr.toString()); logged = true; } } } } @Test public void MultiPoint_Methods() { MultiPointPath<MultiPoint> multipoint = shapes.geometry.asMultiPoint(); List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, multipoint.asBinary(), H2); add(expressions, multipoint.asText()); add(expressions, multipoint.boundary(), MYSQL); add(expressions, multipoint.convexHull(), MYSQL); add(expressions, multipoint.dimension()); add(expressions, multipoint.envelope(), H2); add(expressions, multipoint.geometryType(), H2); add(expressions, multipoint.isEmpty()); add(expressions, multipoint.isSimple()); // multipoint specific add(expressions, multipoint.numGeometries(), H2); add(expressions, multipoint.geometryN(1), H2); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : withMultipoints().list(expr)) { if (row == null && !logged) { System.err.println(expr.toString()); logged = true; } } } } @Test public void MultiLineString_Methods() { MultiLineStringPath<MultiLineString> multilinestring = shapes.geometry.asMultiLineString(); List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, multilinestring.asBinary(), H2); add(expressions, multilinestring.asText()); add(expressions, multilinestring.boundary(), MYSQL); add(expressions, multilinestring.convexHull(), MYSQL); add(expressions, multilinestring.dimension()); add(expressions, multilinestring.envelope(), H2); add(expressions, multilinestring.geometryType(), H2); add(expressions, multilinestring.isEmpty()); add(expressions, multilinestring.isSimple()); // multicurve specific add(expressions, multilinestring.isClosed(), H2); add(expressions, multilinestring.length(), H2); // multilinestring specific add(expressions, multilinestring.numGeometries(), H2); add(expressions, multilinestring.geometryN(1), H2); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : withMultiLineStrings().list(expr)) { if (row == null && !logged) { System.err.println(expr.toString()); logged = true; } } } } @Test public void MultiPolygon_Methods() { MultiPolygonPath<MultiPolygon> multipolygon = shapes.geometry.asMultiPolygon(); List<Expression<?>> expressions = Lists.newArrayList(); add(expressions, multipolygon.asBinary(), H2); add(expressions, multipolygon.asText()); add(expressions, multipolygon.boundary(), MYSQL); add(expressions, multipolygon.convexHull(), MYSQL); add(expressions, multipolygon.dimension()); add(expressions, multipolygon.envelope(), H2); add(expressions, multipolygon.geometryType(), H2); add(expressions, multipolygon.isEmpty()); add(expressions, multipolygon.isSimple()); // multipolygon specific add(expressions, multipolygon.numGeometries(), H2); add(expressions, multipolygon.geometryN(1), H2); for (Expression<?> expr : expressions) { boolean logged = false; for (Object row : withMultiPolygons().list(expr)) { if (row == null && !logged) { 
System.err.println(expr.toString()); logged = true; } } } } }
package pete.metrics.adaptability.elements; class EventElements extends ElementsCollection { public EventElements() { buildTopLevelStartEvents(); buildEventSubProcessStartEvents(); buildEndEvents(); buildIntermediateEvents(); } public String buildEventXPathExpression(String event, String eventType) {