-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathjs-binding.d.ts
131 lines (129 loc) · 3.3 KB
/
js-binding.d.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
/**
 * Data attached to a {@link KafkaEvent} delivered via `KafkaConsumer.onEvents`.
 */
export interface KafkaEventPayload {
  /** Event-specific action label; absent for events that carry none. */
  action?: string
  /** Topic/partition assignments the event refers to. */
  tpl: Array<TopicPartition>
  /** Error description when the event reports a failure; absent on success. */
  error?: string
}
/**
 * Kinds of consumer lifecycle events surfaced to JS.
 * String enum — values cross the NAPI boundary as plain strings.
 */
export enum KafkaEventName {
  /** Emitted before a group rebalance takes effect. */
  PreRebalance = 'PreRebalance',
  /** Emitted after a group rebalance has completed. */
  PostRebalance = 'PostRebalance',
  /** Emitted when an offset commit completes (see CommitMode). */
  CommitCallback = 'CommitCallback'
}
/**
 * Event object passed to the callback registered with `KafkaConsumer.onEvents`.
 */
export interface KafkaEvent {
  /** Which lifecycle event occurred. */
  name: KafkaEventName
  /** Details for this event (partitions involved, optional error). */
  payload: KafkaEventPayload
}
/**
 * Retry policy for message processing.
 */
export interface RetryStrategy {
  /** Maximum number of retry attempts. */
  retries: number
  /** Topic to publish retried messages to, if routing retries to a separate topic. */
  retryTopic?: string
  /**
   * NOTE(review): presumably the dead-letter-queue topic — the name looks like a
   * typo for `dlqTopic`, but it mirrors the generated native binding and cannot
   * be renamed here without breaking the contract. Fix in the Rust source.
   */
  dqlTopic?: string
  /** How long to pause the consumer between retries — units not shown here; presumably milliseconds, TODO confirm. */
  pauseConsumerDuration?: number
  /** Offset to resume from when retrying. */
  offset?: OffsetModel
  /** Extra librdkafka-style key/value configuration for the retry path — TODO confirm consumer semantics. */
  configuration?: Record<string, string>
}
/**
 * How offset commits are performed.
 * Numeric enum — the values (0/1) are part of the native binding contract,
 * so do not convert to a string enum.
 */
export enum CommitMode {
  /** Block until the commit is acknowledged. */
  Sync = 0,
  /** Fire-and-forget commit; result arrives via the CommitCallback event. */
  Async = 1
}
/**
 * Options for `KafkaClientConfig.createConsumer`.
 */
export interface ConsumerConfiguration {
  /** Kafka consumer group id. */
  groupId: string
  /** Create the topic if it does not exist — TODO confirm default when omitted. */
  createTopic?: boolean
  /** Enable librdkafka auto-commit of offsets. */
  enableAutoCommit?: boolean
  /** Extra librdkafka-style key/value configuration passed through to the native client. */
  configuration?: Record<string, string>
  /**
   * Timeout for fetching topic metadata — units not shown here, presumably ms.
   * NOTE(review): `fecth` is a typo for `fetch`, but the name mirrors the
   * generated native binding; renaming here would break it. Fix in the Rust source.
   */
  fecthMetadataTimeout?: number
}
/**
 * Symbolic partition positions (mirrors rdkafka's offset specifiers).
 */
export enum PartitionPosition {
  /** Start consuming from the beginning of the partition. */
  Beginning = 'Beginning',
  /** Start consuming from the end of the partition (new messages only). */
  End = 'End',
  /** Resume from the last stored/committed offset. */
  Stored = 'Stored',
  /** No valid position. */
  Invalid = 'Invalid'
}
/**
 * An offset expressed either as an absolute number or a symbolic position.
 * NOTE(review): behavior when both fields are set is not visible here — TODO confirm precedence.
 */
export interface OffsetModel {
  /** Absolute offset within the partition. */
  offset?: number
  /** Symbolic position (Beginning/End/Stored/Invalid). */
  position?: PartitionPosition
}
/**
 * Pairs a partition number with an offset specification.
 */
export interface PartitionOffset {
  /** Partition index. */
  partition: number
  /** Offset to use for that partition. */
  offset: OffsetModel
}
/**
 * Subscription configuration for one topic, used by `KafkaConsumer.subscribe`.
 */
export interface TopicPartitionConfig {
  /** Topic name. */
  topic: string
  /** Offset applied to every partition of the topic — TODO confirm interaction with partitionOffset. */
  allOffsets?: OffsetModel
  /** Per-partition offset overrides. */
  partitionOffset?: Array<PartitionOffset>
}
/**
 * A topic with its per-partition offsets, as reported by
 * `KafkaConsumer.assignment` and in rebalance event payloads.
 */
export interface TopicPartition {
  /** Topic name. */
  topic: string
  /** Offsets for each assigned partition. */
  partitionOffset: Array<PartitionOffset>
}
/**
 * Transport security options (mirrors librdkafka's `security.protocol`).
 */
export enum SecurityProtocol {
  Plaintext = 'Plaintext',
  Ssl = 'Ssl',
  SaslPlaintext = 'SaslPlaintext',
  SaslSsl = 'SaslSsl'
}
/**
 * Client-wide configuration passed to the `KafkaClientConfig` constructor.
 */
export interface KafkaConfiguration {
  /** Broker list — presumably a comma-separated `host:port` string (librdkafka `bootstrap.servers` style); TODO confirm. */
  brokers: string
  /** Client identifier reported to the brokers. */
  clientId: string
  /** Transport security; TODO confirm default when omitted. */
  securityProtocol?: SecurityProtocol
  /** Extra librdkafka-style key/value configuration passed through to the native client. */
  configuration?: Record<string, string>
  /** Native client log level — accepted values not visible here. */
  logLevel?: string
  /** Address family preference (librdkafka `broker.address.family` style) — TODO confirm accepted values. */
  brokerAddressFamily?: string
}
/**
 * A consumed Kafka message, as returned by `KafkaConsumer.recv`.
 */
export interface Message {
  /** Raw message value. */
  payload: Buffer
  /** Raw message key, if the message has one. */
  key?: Buffer
  /** Message headers as raw bytes keyed by header name. */
  headers?: Record<string, Buffer>
  /** Topic the message came from. */
  topic: string
  /** Partition the message came from. */
  partition: number
  /** Offset of the message within its partition. */
  offset: number
}
/**
 * Per-message delivery result returned by `KafkaProducer.send` / `flush`.
 */
export interface RecordMetadata {
  /** Topic the record was produced to. */
  topic: string
  /** Partition the record landed on. */
  partition: number
  /** Offset assigned to the record. */
  offset: number
  /** Delivery error, if the record failed. */
  error?: KafkaCrabError
}
/**
 * An outgoing message to be produced (value, optional key and headers).
 */
export interface MessageProducer {
  /** Raw message value. */
  payload: Buffer
  /** Raw message key; absent for unkeyed messages. */
  key?: Buffer
  /** Message headers as raw bytes keyed by header name. */
  headers?: Record<string, Buffer>
}
/**
 * A batch of messages destined for a single topic, for `KafkaProducer.send`.
 */
export interface ProducerRecord {
  /** Destination topic. */
  topic: string
  /** Messages to produce. */
  messages: Array<MessageProducer>
}
/**
 * Error shape surfaced from the native (Rust) side of the binding.
 */
export interface KafkaCrabError {
  /** Numeric error code — presumably the librdkafka error code; TODO confirm. */
  code: number
  /** Human-readable error description. */
  message: string
}
/**
 * Options for `KafkaClientConfig.createProducer`.
 */
export interface ProducerConfiguration {
  /** Queue timeout — units not shown here, presumably ms; TODO confirm. */
  queueTimeout?: number
  /**
   * Whether send failures throw instead of being reported via RecordMetadata.error.
   * NOTE(review): name looks like a typo for `throwOnError`, but it mirrors the
   * generated native binding and cannot be renamed here. Fix in the Rust source.
   */
  thrownOnError?: boolean
  /** Automatically flush after producing — TODO confirm default when omitted. */
  autoFlush?: boolean
  /** Extra librdkafka-style key/value configuration passed through to the native client. */
  configuration?: Record<string, string>
}
/**
 * Native Kafka consumer handle. Instances are obtained from
 * `KafkaClientConfig.createConsumer`, not constructed directly
 * (no public constructor is declared here).
 */
export declare class KafkaConsumer {
  /**
   * Registers a callback for consumer lifecycle events (rebalances, commit results).
   * The callback receives either an error or the event, per Node callback convention.
   */
  onEvents(callback: (error: Error | undefined, event: KafkaEvent) => void): void
  /**
   * Subscribes to one topic (by name) or to several topics with explicit
   * per-partition offset configuration.
   */
  subscribe(topicConfigs: string | Array<TopicPartitionConfig>): Promise<void>
  /** Pauses consumption — scope (all assigned partitions?) not visible here. */
  pause(): void
  /** Resumes consumption after a pause. */
  resume(): void
  /** Removes all topic subscriptions. */
  unsubscribe(): void
  /** Disconnects from the cluster; resolves when shutdown completes. */
  disconnect(): Promise<void>
  /**
   * Seeks a topic/partition to the given offset.
   * `timeout` units not shown here — presumably ms; TODO confirm.
   */
  seek(topic: string, partition: number, offsetModel: OffsetModel, timeout?: number | undefined | null): void
  /** Returns the consumer's current topic/partition assignment. */
  assignment(): Array<TopicPartition>
  /** Receives the next message; resolves `null` when none is available — TODO confirm exact null semantics. */
  recv(): Promise<Message | null>
  /** Commits the given offset for a topic/partition, synchronously or asynchronously per `commit`. */
  commit(topic: string, partition: number, offset: number, commit: CommitMode): void
}
/**
 * Entry point of the binding: holds cluster configuration and acts as a
 * factory for producers and consumers.
 */
export declare class KafkaClientConfig {
  /** The configuration this client was created with (immutable after construction). */
  readonly kafkaConfiguration: KafkaConfiguration
  constructor(kafkaConfiguration: KafkaConfiguration)
  /** Creates a producer bound to this client's cluster configuration. */
  createProducer(producerConfiguration: ProducerConfiguration): KafkaProducer
  /** Creates a consumer bound to this client's cluster configuration. */
  createConsumer(consumerConfiguration: ConsumerConfiguration): KafkaConsumer
}
/**
 * Native Kafka producer handle. Instances are obtained from
 * `KafkaClientConfig.createProducer`, not constructed directly.
 */
export declare class KafkaProducer {
  /** Number of messages queued but not yet delivered. */
  inFlightCount(): number
  /** Flushes queued messages; resolves with delivery metadata for each. */
  flush(): Promise<Array<RecordMetadata>>
  /** Produces the record's messages; resolves with per-message delivery metadata. */
  send(producerRecord: ProducerRecord): Promise<Array<RecordMetadata>>
}