...

Code Block
language: bash
title: ibsnow.properties
# The IBSNOW instance friendly name. If omitted, it will become 'IBSNOW-vm-instance-id'
#ibsnow_instance_name =  

# The Cloud region the IoT Bridge for Snowflake instance is in
# ibsnow_cloud_region = us-east-1

# MQTT Server definitions. IoT Bridge for Snowflake supports multiple MQTT Servers. Each definition must include an 'index' as shown
# below represented by 'X'. The first should begin with 1 and each additional server definition should have an index of 1 greater
# than the previous.
# mqtt_server_url.X                     # The MQTT Server URL
# mqtt_server_name.X                    # The MQTT Server name
# mqtt_username.X                       # The MQTT username (if required by the MQTT Server)
# mqtt_password.X                       # The MQTT password (if required by the MQTT Server)
# mqtt_keepalive_timeout.X              # The MQTT keep-alive timeout in seconds
# mqtt_ca_cert_chain_path.X             # The path to the TLS Certificate Authority certificate chain
# mqtt_client_cert_path.X               # The path to the Device TLS certificate
# mqtt_client_private_key_path.X        # The path to the Device TLS private key
# mqtt_client_private_key_password.X    # The Device TLS private key password
# mqtt_verify_hostname.X                # Whether or not to verify the hostname against the server certificate
# mqtt_client_id.X                      # The Client ID of the MQTT Client
# mqtt_sparkplug_subscriptions.X        # The Sparkplug subscriptions to issue when connecting to the MQTT Server.
#                                       # By default this is spBv1.0/# but can be scoped more narrowly (e.g. spBv1.0/Group1/#)
#                                       # It can also be a comma separated list (e.g. spBv1.0/Group1/#,spBv1.0/Group2/#)
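#
# Illustrative sketch (values below are hypothetical placeholders, not part of the stock file):
# a second MQTT Server definition repeats the properties above with index 2 and can scope its
# Sparkplug subscription more narrowly, for example:
# mqtt_server_url.2 = ssl://backup-mqtt.example.com:8883
# mqtt_server_name.2 = My Backup MQTT Server
# mqtt_sparkplug_subscriptions.2 = spBv1.0/Group1/#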

mqtt_server_url.1 = ssl://REPLACE_WITH_MQTT_SERVER_ENDPOINT:8883
mqtt_server_name.1 = My MQTT Server

# Enable Snowflake Sparkplug MQTT Application
snowflake_application_enabled = true

# Enable Snowflake Raw MQTT Application
snowflake_mqtt_application_enabled = true

# Comma separated list of Sparkplug subscriptions
mqtt_sparkplug_subscriptions.1 = spBv1.0/#

# Comma separated list of MQTT subscription topic:QoS pairs (e.g. a/#:0,b/#:0)
# mqtt_subscriptions.1 =

#mqtt_keepalive_timeout.1 = 30
#mqtt_verify_hostname.1 = true
#mqtt_username.1 =
#mqtt_password.1 =
#mqtt_ca_cert_chain_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_cert_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_private_key_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_private_key_password.1 =
#mqtt_client_id.1 =
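#
# Illustrative sketch (hypothetical credentials and paths): for an MQTT Server that requires
# username/password authentication and a CA-signed TLS server certificate, the commented
# options above might be filled in along these lines:
# mqtt_username.1 = ibsnow-user
# mqtt_password.1 = changeme
# mqtt_ca_cert_chain_path.1 = /opt/ibsnow/conf/certs/myCACert.pem
# mqtt_verify_hostname.1 = true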

# The Sparkplug sequence reordering timeout in milliseconds
sequence_reordering_timeout = 5000

# Whether or not to block auto-rebirth requests
#block_auto_rebirth = false

# The primary host ID if this is the acting primary host
#primary_host_id =

# Snowflake streaming connection properties - A custom client name for the connection (e.g. MyClient)
#snowflake_streaming_client_name = IBSNOWClient

# Maximum number of rows to insert into the snowflake_streaming_table at once. Set to 0 for no limit.
snowflake_max_streaming_insert_batch_size = 0

# Maximum number of rows to insert into the snowflake_mqtt_streaming_table at once. Set to 0 for no limit.  
snowflake_max_mqtt_streaming_insert_batch_size = 0

# Do raw MQTT inserts one topic at a time
snowflake_topic_based_mqtt_streaming_insert = false

# Ingest task execution period (in seconds)
snowflake_streaming_ingest_task_period = 1

# Raw MQTT ingest task execution period (in seconds) 
snowflake_mqtt_streaming_ingest_task_period = 1


# Snowflake streaming connection properties - The scheme to use for channels and their names
# This MUST be one of the following: STATIC, GROUP_ID, EDGE_ID, DEVICE_ID
# STATIC - means to use a single channel. If using this mode, the snowflake_streaming_channel_name is used as the channel name
# GROUP_ID - means to use the Sparkplug Group ID for the channel name on incoming data
# EDGE_ID - means to use the Sparkplug Group ID and the Edge Node ID for the channel name on incoming data
# DEVICE_ID - means to use the Sparkplug Group ID, Edge Node ID, and Device ID for the channel name on incoming data
snowflake_streaming_channel_scheme = EDGE_ID
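
# Illustrative example (hypothetical Sparkplug IDs, not part of the stock file): for data published
# by Group ID 'Group1', Edge Node ID 'Edge1', and Device ID 'Device1':
#   GROUP_ID  - the channel name is derived from 'Group1'
#   EDGE_ID   - the channel name is derived from 'Group1' and 'Edge1'
#   DEVICE_ID - the channel name is derived from 'Group1', 'Edge1', and 'Device1'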

# Snowflake streaming connection properties - A custom channel name for the connection (e.g. MyChannel)
# If this is left blank/empty, Channel names of the Sparkplug Group ID will be used instead of a single channel
# snowflake_streaming_channel_name =
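#
# Illustrative example: a fixed channel name is intended for the STATIC scheme described above, e.g.:
# snowflake_streaming_channel_name = MyChannel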

# Snowflake streaming connection properties - The Table name for Sparkplug data associated with the Database and Schema already provisioned in the Snowflake account (e.g. MyTable)
snowflake_streaming_table_name = SPARKPLUG_RAW

# Snowflake streaming connection properties - The Table name for RAW MQTT data associated with the Database and Schema already provisioned in the Snowflake account (e.g. MyTable)
snowflake_mqtt_streaming_table_name = MQTT_RAW

# Maximum number of streaming channels for RAW MQTT messages
# snowflake_max_mqtt_streaming_channels =

# Snowflake notify connection properties - The Database name associated with the connection that is already provisioned in the Snowflake account (e.g. MyDb)
snowflake_notify_db_name = cl_bridge_node_db

# Snowflake notify connection properties - The Schema name associated with the Database already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_schema_name = stage_db

# Snowflake notify connection properties - The Warehouse name associated with the notifications already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_warehouse_name = cl_bridge_ingest_wh

# Whether or not to create and update IBSNOW informational tracking metrics
# ibsnow_metrics_enabled = true

# The Sparkplug Group ID to use for IBSNOW asset names
ibsnow_metrics_sparkplug_group_id = IBSNOW

# The 'Bridge Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_bridge_info_sparkplug_edge_node_id = Bridge Info

# The 'Edge Node Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_edge_node_info_sparkplug_edge_node_id = Edge Node Info

# The 'MQTT Client Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_mqtt_client_info_sparkplug_edge_node_id = MQTT Client Info

# Whether or not to send notification tasks to Snowflake based on incoming Sparkplug events
snowflake_notify_task_enabled = true

# The number of threads to use for BIRTH handling in Snowflake
# snowflake_notify_task_birth_thread_count = 100

# The number of times to retry the NotifyIngest task on failure
snowflake_notify_nbirth_retries = 10

# The number of milliseconds to delay after receiving an NBIRTH before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_nbirth_task_delay = 15000

# The number of milliseconds to delay after receiving a DBIRTH or DATA message before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_data_task_delay = 5000

# NotifyTimerTask keep alive in seconds. This setting allows inactive threads to be killed after the keep-alive expires.
snowflake_notify_task_keep_alive = 60

# Enable performance metrics such as 'NotifyIngest thread pool info' and 'Message Inflow Rates' per EdgeNode or topic.
enable_performance_metrics = true

Configuration Examples

Cirrus Link's Chariot MQTT Server using a real signed TLS certificate

...