flink-issues mailing list archives

From mushketyk <...@git.apache.org>
Subject [GitHub] flink pull request #2487: [FLINK-4520][flink-siddhi] Integrate Siddhi as a l...
Date Mon, 12 Sep 2016 20:23:38 GMT
Github user mushketyk commented on a diff in the pull request:

    https://github.com/apache/flink/pull/2487#discussion_r78447224
  
    --- Diff: flink-contrib/flink-siddhi/src/main/java/org/apache/flink/contrib/siddhi/operator/AbstractSiddhiOperator.java ---
    @@ -0,0 +1,265 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + * <p/>
    + * http://www.apache.org/licenses/LICENSE-2.0
    + * <p/>
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.flink.contrib.siddhi.operator;
    +
    +import org.apache.flink.api.common.ExecutionConfig;
    +import org.apache.flink.contrib.siddhi.exception.UndefinedStreamException;
    +import org.apache.flink.contrib.siddhi.schema.StreamSchema;
    +import org.apache.flink.core.fs.FSDataInputStream;
    +import org.apache.flink.core.fs.FSDataOutputStream;
    +import org.apache.flink.core.memory.DataInputView;
    +import org.apache.flink.core.memory.DataInputViewStreamWrapper;
    +import org.apache.flink.core.memory.DataOutputView;
    +import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
    +import org.apache.flink.streaming.api.TimeCharacteristic;
    +import org.apache.flink.streaming.api.graph.StreamConfig;
    +import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
    +import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
    +import org.apache.flink.streaming.api.operators.Output;
    +import org.apache.flink.streaming.api.watermark.Watermark;
    +import org.apache.flink.streaming.runtime.streamrecord.MultiplexingStreamRecordSerializer;
    +import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
    +import org.apache.flink.streaming.runtime.tasks.StreamTask;
    +import org.slf4j.Logger;
    +import org.slf4j.LoggerFactory;
    +import org.wso2.siddhi.core.ExecutionPlanRuntime;
    +import org.wso2.siddhi.core.SiddhiManager;
    +import org.wso2.siddhi.core.stream.input.InputHandler;
    +import org.wso2.siddhi.query.api.definition.AbstractDefinition;
    +
    +import java.io.IOException;
    +import java.io.ObjectInputStream;
    +import java.io.ObjectOutputStream;
    +import java.util.HashMap;
    +import java.util.Map;
    +import java.util.PriorityQueue;
    +
    +public abstract class AbstractSiddhiOperator<IN, OUT> extends AbstractStreamOperator<OUT> implements OneInputStreamOperator<IN, OUT> {
    +	private static final Logger LOGGER = LoggerFactory.getLogger(AbstractSiddhiOperator.class);
    +	private static final int INITIAL_PRIORITY_QUEUE_CAPACITY = 11;
    +
    +	private final SiddhiOperatorContext siddhiPlan;
    +	private final String executionExpression;
    +	private final boolean isProcessingTime;
    +	private final Map<String, MultiplexingStreamRecordSerializer<IN>> streamRecordSerializers;
    +
    +	private transient SiddhiManager siddhiManager;
    +	private transient ExecutionPlanRuntime siddhiRuntime;
    +	private transient Map<String, InputHandler> inputStreamHandlers;
    +
    +	// queue to buffer out of order stream records
    +	private transient PriorityQueue<StreamRecord<IN>> priorityQueue;
    +
    +	/**
    +	 * @param siddhiPlan Siddhi CEP  Execution Plan
    +	 */
    +	public AbstractSiddhiOperator(SiddhiOperatorContext siddhiPlan) {
    +		validate(siddhiPlan);
    +		this.executionExpression = siddhiPlan.getFinalExecutionPlan();
    +		this.siddhiPlan = siddhiPlan;
    +		this.isProcessingTime = this.siddhiPlan.getTimeCharacteristic() == TimeCharacteristic.ProcessingTime;
    +		this.streamRecordSerializers = new HashMap<>();
    +
    +		for (String streamId : this.siddhiPlan.getInputStreams()) {
    +			streamRecordSerializers.put(streamId, createStreamRecordSerializer(this.siddhiPlan.getInputStreamSchema(streamId), this.siddhiPlan.getExecutionConfig()));
    +		}
    +	}
    +
    +	protected abstract MultiplexingStreamRecordSerializer<IN> createStreamRecordSerializer(StreamSchema streamSchema, ExecutionConfig executionConfig);
    +
    +	protected MultiplexingStreamRecordSerializer<IN> getStreamRecordSerializer(String streamId) {
    +		if (streamRecordSerializers.containsKey(streamId)) {
    +			return streamRecordSerializers.get(streamId);
    +		} else {
    +			throw new UndefinedStreamException("Stream " + streamId + " not defined");
    +		}
    +	}
    +
    +	@Override
    +	public void processElement(StreamRecord<IN> element) throws Exception {
    --- End diff ---
    
    This code seems to be very similar to the code in the CEP library. Can we reuse it somehow?
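    
    For context, the overlap seems to be the event-time buffering pattern: both operators park out-of-order records in a timestamp-ordered PriorityQueue and only hand them to the underlying engine once a watermark arrives. A rough sketch of that shared pattern is below; it is illustrative only (the class name OrderedBufferSketch and the evaluate(...) hook are placeholders, not the actual CEP or Siddhi code):
    
    	import java.util.PriorityQueue;
    
    	import org.apache.flink.streaming.api.watermark.Watermark;
    	import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
    
    	public abstract class OrderedBufferSketch<IN> {
    
    		// Out-of-order records are parked here, ordered by event timestamp.
    		private final PriorityQueue<StreamRecord<IN>> queue =
    			new PriorityQueue<StreamRecord<IN>>(11, (a, b) -> Long.compare(a.getTimestamp(), b.getTimestamp()));
    
    		// processElement side: in event time, only buffer, never evaluate eagerly.
    		public void bufferElement(StreamRecord<IN> element) {
    			queue.offer(element);
    		}
    
    		// processWatermark side: drain every record the watermark guarantees to be complete.
    		public void drainTo(Watermark mark) throws Exception {
    			while (!queue.isEmpty() && queue.peek().getTimestamp() <= mark.getTimestamp()) {
    				evaluate(queue.poll());
    			}
    		}
    
    		// Engine-specific evaluation step (e.g. pattern matching in CEP, InputHandler.send(...) here).
    		protected abstract void evaluate(StreamRecord<IN> record) throws Exception;
    	}
    
    If that buffering/draining part were factored into a shared base class or utility, both libraries could reuse it and only the engine-specific evaluation step would differ.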


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---
