dendrite/cmd/kafka-producer/main.go
Kegsay b6ea1bc67a
Support sqlite in addition to postgres (#869)
* Move current work into single branch

* Initial massaging of clientapi etc (not working yet)

* Interfaces for accounts/devices databases

* Duplicate postgres package for sqlite3 (no changes made to it yet)

* Some keydb, accountdb, devicedb, common partition fixes, some more syncapi tweaking

* Fix accounts DB, device DB

* Update naffka dependency for SQLite

* Naffka SQLite

* Update naffka to latest master

* SQLite support for federationsender

* Mostly not-bad support for SQLite in syncapi (although there are problems where lots of events get classed incorrectly as backward extremities, probably because of IN/ANY clauses that are badly supported; see the placeholder sketch after these commit notes)

* Update Dockerfile -> Go 1.13.7, add build-base (as gcc and friends are needed for SQLite)

* Implement GET endpoints for account_data in clientapi

* Nuke filtering for now...

* Revert "Implement GET endpoints for account_data in clientapi"

This reverts commit 4d80dff458.

* Implement GET endpoints for account_data in clientapi (#861)

* Implement GET endpoints for account_data in clientapi

* Fix accountDB parameter

* Remove fmt.Println

* Fix insertAccountData SQLite query

* Fix accountDB storage interfaces

* Add empty push rules into account data on account creation (#862)

* Put SaveAccountData into the right function this time

* Not sure if roomserver is better or worse now

* sqlite work

* Allow empty last sent ID for the first event

* sqlite: room creation works

* Support sending messages

* Nuke fmt.println

* Move QueryVariadic etc into common, other device fixes

* Fix some linter issues

* Fix bugs

* Fix some linting errors

* Fix errcheck lint errors

* Make naffka use postgres as fallback, fix couple of compile errors

* What on earth happened to the /rooms/{roomID}/send/{eventType} routing

Co-authored-by: Neil Alexander <neilalexander@users.noreply.github.com>
2020-02-13 17:27:33 +00:00
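
The commit notes above say that SQLite copes badly with the IN/ANY clauses used on the Postgres code paths, and that a QueryVariadic helper was moved into common to deal with it. As a rough, hedged sketch only (the helper, table and column names below are assumptions, not Dendrite's actual code), the underlying technique is to replace a Postgres-style "event_id = ANY($1)" array parameter with an "event_id IN (?, ?, ...)" clause whose placeholder count matches the number of arguments:

// Rough sketch, not Dendrite's actual helper: expand an IN clause with one "?"
// placeholder per argument, since SQLite cannot take a Postgres-style
// "= ANY($1)" array parameter.
package main

import (
	"fmt"
	"strings"
)

// queryVariadic builds a placeholder group such as "(?, ?, ?)" for count arguments.
func queryVariadic(count int) string {
	placeholders := make([]string, count)
	for i := range placeholders {
		placeholders[i] = "?"
	}
	return "(" + strings.Join(placeholders, ", ") + ")"
}

func main() {
	eventIDs := []interface{}{"$event1:example.org", "$event2:example.org", "$event3:example.org"}
	// Illustrative table and column names only.
	query := "SELECT event_id FROM events WHERE event_id IN " + queryVariadic(len(eventIDs))
	fmt.Println(query)
	// The query would then be executed with the IDs passed as ordinary variadic
	// arguments, e.g. db.Query(query, eventIDs...).
}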


// Copyright 2017 Vector Creations Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"bufio"
	"flag"
	"fmt"
	"os"
	"strings"

	sarama "gopkg.in/Shopify/sarama.v1"
)

const usage = `Usage: %s

Reads a list of newline separated messages from stdin and writes them to a single partition in kafka.

Arguments:

`

var (
	brokerList = flag.String("brokers", os.Getenv("KAFKA_PEERS"), "The comma separated list of brokers in the Kafka cluster. You can also set the KAFKA_PEERS environment variable")
	topic      = flag.String("topic", "", "REQUIRED: the topic to produce to")
	partition  = flag.Int("partition", 0, "The partition to produce to. All the messages will be written to this partition.")
)

func main() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, usage, os.Args[0])
		flag.PrintDefaults()
	}
	flag.Parse()

	if *brokerList == "" {
		fmt.Fprintln(os.Stderr, "no -brokers specified. Alternatively, set the KAFKA_PEERS environment variable")
		os.Exit(1)
	}

	if *topic == "" {
		fmt.Fprintln(os.Stderr, "no -topic specified")
		os.Exit(1)
	}

	config := sarama.NewConfig()
	// Wait for all in-sync replicas to acknowledge each message.
	config.Producer.RequiredAcks = sarama.WaitForAll
	// The SyncProducer requires successes to be returned so SendMessage can block until delivery.
	config.Producer.Return.Successes = true
	// Use the manual partitioner so every message goes to the -partition given on the command line.
	config.Producer.Partitioner = sarama.NewManualPartitioner

	producer, err := sarama.NewSyncProducer(strings.Split(*brokerList, ","), config)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Failed to open Kafka producer:", err)
		os.Exit(1)
	}
	defer func() {
		if err := producer.Close(); err != nil {
			fmt.Fprintln(os.Stderr, "Failed to close Kafka producer cleanly:", err)
		}
	}()

	// Send each newline-separated message read from stdin synchronously, one per line.
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		line := scanner.Bytes()
		message := &sarama.ProducerMessage{
			Topic:     *topic,
			Partition: int32(*partition),
			Value:     sarama.ByteEncoder(line),
		}
		if _, _, err := producer.SendMessage(message); err != nil {
			fmt.Fprintln(os.Stderr, "Failed to send message:", err)
			os.Exit(1)
		}
	}
	if err := scanner.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "reading standard input:", err)
	}
}