influxdb
Latest release
- conduit-connector-influxdb_0.1.0_Darwin_arm64.tar.gz
- conduit-connector-influxdb_0.1.0_Darwin_x86_64.tar.gz
- conduit-connector-influxdb_0.1.0_Linux_arm64.tar.gz
- conduit-connector-influxdb_0.1.0_Linux_i386.tar.gz
- conduit-connector-influxdb_0.1.0_Linux_x86_64.tar.gz
- conduit-connector-influxdb_0.1.0_Windows_arm64.tar.gz
- conduit-connector-influxdb_0.1.0_Windows_i386.tar.gz
- conduit-connector-influxdb_0.1.0_Windows_x86_64.tar.gz
Description
Source and destination connector for InfluxDB.
Source Parameters
version: 2.2
pipelines:
  - id: example
    status: running
    connectors:
      - id: example-source
        type: source
        plugin: "influxdb"
        name: example-source
        settings:
          # Bucket specifies the InfluxDB bucket for reading or writing data.
          # Type: string
          bucket: ""
          # A measurement typically tracks one kind of metric over time,
          # similar to a table. This map pairs each measurement with its
          # unique key field.
          # Type: string
          measurements.*: ""
          # Org is an organization name or ID.
          # Type: string
          org: ""
          # Token is used to authenticate API access.
          # Type: string
          token: ""
          # URL of the remote InfluxDB host used for API calls.
          # Type: string
          url: ""
          # This period is used by workers to poll for new data at regular
          # intervals.
          # Type: duration
          pollingPeriod: "5s"
          # The maximum number of retries for failed operations.
          # Type: int
          retries: "0"
          # Maximum delay before an incomplete batch is read from the source.
          # Type: duration
          sdk.batch.delay: "0"
          # Maximum size of batch before it gets read from the source.
          # Type: int
          sdk.batch.size: "0"
          # Specifies whether to use a schema context name. If set to false, no
          # schema context name will be used, and schemas will be saved with the
          # subject name specified in the connector (not safe because of name
          # conflicts).
          # Type: bool
          sdk.schema.context.enabled: "true"
          # Schema context name to be used. Used as a prefix for all schema
          # subject names. If empty, defaults to the connector ID.
          # Type: string
          sdk.schema.context.name: ""
          # Whether to extract and encode the record key with a schema.
          # Type: bool
          sdk.schema.extract.key.enabled: "true"
          # The subject of the key schema. If the record metadata contains the
          # field "opencdc.collection" it is prepended to the subject name and
          # separated with a dot.
          # Type: string
          sdk.schema.extract.key.subject: "key"
          # Whether to extract and encode the record payload with a schema.
          # Type: bool
          sdk.schema.extract.payload.enabled: "true"
          # The subject of the payload schema. If the record metadata contains
          # the field "opencdc.collection" it is prepended to the subject name
          # and separated with a dot.
          # Type: string
          sdk.schema.extract.payload.subject: "payload"
          # The type of the payload schema.
          # Type: string
          sdk.schema.extract.type: "avro"
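For orientation, here is a minimal sketch of a complete source pipeline with the settings filled in. All values are assumptions for illustration only: a local InfluxDB at http://localhost:8086, a placeholder token, an organization my-org, a bucket metrics, and a single measurement cpu whose unique key field is assumed to be time (the actual key field depends on how your data is written). Conduit's builtin file connector is used as a stand-in destination.

version: 2.2
pipelines:
  - id: influxdb-to-file
    status: running
    connectors:
      - id: influxdb-source
        type: source
        plugin: "influxdb"
        settings:
          url: "http://localhost:8086"   # assumed local InfluxDB instance
          token: "my-token"              # placeholder API token
          org: "my-org"                  # assumed organization name
          bucket: "metrics"              # assumed bucket name
          # Wildcard parameter: one entry per measurement, the value is its
          # unique key field (assumed to be "time" here).
          measurements.cpu: "time"
          pollingPeriod: "10s"
      - id: file-destination
        type: destination
        plugin: "file"
        settings:
          path: "./cpu.out"              # placeholder output file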
Destination Parameters
version: 2.2
pipelines:
  - id: example
    status: running
    connectors:
      - id: example-destination
        type: destination
        plugin: "influxdb"
        name: example-destination
        settings:
          # Bucket specifies the InfluxDB bucket for reading or writing data.
          # Type: string
          bucket: ""
          # Org is an organization name or ID.
          # Type: string
          org: ""
          # Token is used to authenticate API access.
          # Type: string
          token: ""
          # URL of the remote InfluxDB host used for API calls.
          # Type: string
          url: ""
          # Measurement is the measurement name to insert data into.
          # Type: string
          measurement: '{{ index .Metadata "opencdc.collection" }}'
          # Maximum delay before an incomplete batch is written to the
          # destination.
          # Type: duration
          sdk.batch.delay: "0"
          # Maximum size of batch before it gets written to the destination.
          # Type: int
          sdk.batch.size: "0"
          # Allow bursts of at most X records (0 or less means that bursts are
          # not limited). Only takes effect if a rate limit per second is set.
          # Note that if `sdk.batch.size` is bigger than `sdk.rate.burst`, the
          # effective batch size will be equal to `sdk.rate.burst`.
          # Type: int
          sdk.rate.burst: "0"
          # Maximum number of records written per second (0 means no rate
          # limit).
          # Type: float
          sdk.rate.perSecond: "0"
          # The format of the output record. See the Conduit documentation for a
          # full list of supported formats
          # (https://conduit.io/docs/using/connectors/configuration-parameters/output-format).
          # Type: string
          sdk.record.format: "opencdc/json"
          # Options to configure the chosen output record format. Options are
          # normally key=value pairs separated with commas (e.g.
          # opt1=val1,opt2=val2), except for the `template` record format, where
          # options are a Go template.
          # Type: string
          sdk.record.format.options: ""
          # Whether to extract and decode the record key with a schema.
          # Type: bool
          sdk.schema.extract.key.enabled: "true"
          # Whether to extract and decode the record payload with a schema.
          # Type: bool
          sdk.schema.extract.payload.enabled: "true"
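The default measurement value is a Go template that resolves to the record's opencdc.collection metadata field. If your records don't carry that metadata, or you want all records written to a single measurement, a static name can be used instead. Below is a minimal sketch under the same assumed values as above (local InfluxDB URL, placeholder token, org my-org, bucket metrics), using the builtin file connector as a stand-in source.

version: 2.2
pipelines:
  - id: file-to-influxdb
    status: running
    connectors:
      - id: file-source
        type: source
        plugin: "file"
        settings:
          path: "./metrics.in"           # placeholder input file
      - id: influxdb-destination
        type: destination
        plugin: "influxdb"
        settings:
          url: "http://localhost:8086"   # assumed local InfluxDB instance
          token: "my-token"              # placeholder API token
          org: "my-org"                  # assumed organization name
          bucket: "metrics"              # assumed bucket name
          measurement: "cpu"             # static name instead of the default template
          sdk.batch.size: "100"          # flush after 100 records...
          sdk.batch.delay: "1s"          # ...or after 1 second, whichever comes first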