public MockKafkaStream(int numStreams) { this.queues = Lists.newArrayList(); this.mockStreams = Lists.newArrayList(); this.offsets = Lists.newArrayList(); for (int i = 0; i < numStreams; i++) { BlockingQueue<FetchedDataChunk> queue = Queues.newLinkedBlockingQueue(); this.queues.add(queue); this.mockStreams.add(createMockStream(queue)); this.offsets.add(new AtomicLong(0)); } this.nextStream = new AtomicLong(-1); }
/**
 * Builds a Mockito mock of {@link KafkaStream} whose {@code iterator()} returns a real
 * {@link ConsumerIterator} draining the given queue, so pushed chunks are consumable.
 */
@SuppressWarnings("unchecked") // KafkaStream.class mock cannot carry the generic type.
private static KafkaStream<byte[], byte[]> createMockStream(BlockingQueue<FetchedDataChunk> queue) {
  DefaultDecoder keyDecoder = new DefaultDecoder(new VerifiableProperties());
  DefaultDecoder valueDecoder = new DefaultDecoder(new VerifiableProperties());
  // consumerTimeoutMs = -1 means the iterator blocks indefinitely waiting for data.
  ConsumerIterator<byte[], byte[]> iterator =
      new ConsumerIterator<>(queue, -1, keyDecoder, valueDecoder, "clientId");

  KafkaStream<byte[], byte[]> mockStream = (KafkaStream<byte[], byte[]>) Mockito.mock(KafkaStream.class);
  Mockito.when(mockStream.iterator()).thenReturn(iterator);
  return mockStream;
}
/**
 * Pushes a single UTF-8 message onto the next stream, chosen round-robin.
 *
 * @param message message payload; encoded as UTF-8 bytes
 */
public void pushToStream(String message) {
  // Round-robin selection. Do the modulo in long arithmetic via Math.floorMod and
  // only then narrow to int: the previous form `(int) counter % size` cast the long
  // counter to int BEFORE the modulo, so after ~2^31 pushes the truncated int went
  // negative and produced a negative (invalid) stream index.
  int streamNo = (int) Math.floorMod(this.nextStream.incrementAndGet(), (long) this.queues.size());
  AtomicLong offset = this.offsets.get(streamNo);
  BlockingQueue<FetchedDataChunk> queue = this.queues.get(streamNo);
  AtomicLong thisOffset = new AtomicLong(offset.incrementAndGet());
  List<Message> seq = Lists.newArrayList();
  seq.add(new Message(message.getBytes(Charsets.UTF_8)));
  // NOTE(review): the shared per-stream `offset` counter is handed to
  // ByteBufferMessageSet as its offset-assignment counter — presumably Kafka bumps it
  // again when assigning message offsets; verify this double-increment is intended.
  ByteBufferMessageSet messageSet =
      new ByteBufferMessageSet(NoCompressionCodec$.MODULE$, offset, JavaConversions.asScalaBuffer(seq));
  FetchedDataChunk chunk = new FetchedDataChunk(messageSet,
      new PartitionTopicInfo("topic", streamNo, queue, thisOffset, thisOffset, new AtomicInteger(1), "clientId"),
      thisOffset.get());
  queue.add(chunk);
}
/**
 * Unblocks every consumer iterator by enqueueing Kafka's shutdown sentinel chunk
 * onto each stream's queue.
 */
public void shutdown() {
  for (int i = 0; i < this.queues.size(); i++) {
    this.queues.get(i).add(ZookeeperConsumerConnector.shutdownCommand());
  }
}