
Code Block
languagebash
titleibsnow.properties
# The IBSNOW instance friendly name. If omitted, it will become 'IBSNOW-vm-instance-id'
#ibsnow_instance_name = 

# The Cloud region the IoT Bridge for Snowflake instance is in
# ibsnow_cloud_region = us-east-1

# MQTT Server definitions. IoT Bridge for Snowflake supports multiple MQTT Servers. Each definition must include an 'index' as shown
# below represented by 'X'. The first should begin with 1 and each additional server definition should have an index of 1 greater
# than the previous.
# mqtt_server_url.X                     # The MQTT Server URL
# mqtt_server_name.X                    # The MQTT Server name
# mqtt_username.X                       # The MQTT username (if required by the MQTT Server)
# mqtt_password.X                       # The MQTT password (if required by the MQTT Server)
# mqtt_keepalive_timeout.X              # The MQTT keep-alive timeout in seconds
# mqtt_ca_cert_chain_path.X             # The path to the TLS Certificate Authority certificate chain
# mqtt_client_cert_path.X               # The path to the Device TLS certificate
# mqtt_client_private_key_path.X        # The path to the Device TLS private key
# mqtt_client_private_key_password.X    # The Device TLS private key password
# mqtt_verify_hostname.X                # Whether or not to verify the hostname against the server certificate
# mqtt_client_id.X                      # The Client ID of the MQTT Client
# mqtt_sparkplug_subscriptions.X        # The Sparkplug subscriptions to issue when connecting to the MQTT Server.
                                        # By default this is spBv1.0/# but can be scoped more narrowly (e.g. spBv1.0/Group1/#)
                                        # It can also be a comma separated list (e.g. spBv1.0/Group1/#,spBv1.0/Group2/#)

mqtt_server_url.1 = ssl://REPLACE_WITH_MQTT_SERVER_ENDPOINT:8883
mqtt_server_name.1 = My MQTT Server

# Enable Snowflake Sparkplug MQTT Application
snowflake_application_enabled = true

# Enable Snowflake Raw MQTT Application
snowflake_mqtt_application_enabled = true

# Comma separated list of Sparkplug subscriptions
mqtt_sparkplug_subscriptions.1 = spBv1.0/#

# Comma separated list of MQTT subscription topic:QoS pairs (e.g. a/#:0,b/#:0)
# mqtt_subscriptions.1 =

#mqtt_keepalive_timeout.1 = 30
#mqtt_verify_hostname.1 = true
#mqtt_username.1 =
#mqtt_password.1 =
#mqtt_ca_cert_chain_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_cert_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_private_key_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_private_key_password.1 =
#mqtt_client_id.1 =

# The Sparkplug sequence reordering timeout in milliseconds
sequence_reordering_timeout = 5000

# Whether or not to block auto-rebirth requests
#block_auto_rebirth = false

# The primary host ID if this is the acting primary host
#primary_host_id =

# Snowflake streaming connection properties - A custom client name for the connection (e.g. MyClient)
#snowflake_streaming_client_name = IBSNOWClient

# Maximum number of rows to insert into the snowflake_streaming_table at once. Set to 0 for no limit.
snowflake_max_streaming_insert_batch_size = 0

# Maximum number of rows to insert into the snowflake_mqtt_streaming_table at once. Set to 0 for no limit.
snowflake_max_mqtt_streaming_insert_batch_size = 0

# Do raw MQTT inserts one topic at a time
snowflake_topic_based_mqtt_streaming_insert = false

# Ingest task execution period (in seconds)
snowflake_streaming_ingest_task_period = 1

# Raw MQTT ingest task execution period (in seconds)
snowflake_mqtt_streaming_ingest_task_period = 1

# Snowflake streaming connection properties - The scheme to use for channels and their names
# This MUST be one of the following: STATIC, GROUP_ID, EDGE_ID
# STATIC - means to use a single channel. If using this mode, the snowflake_streaming_channel_name
# GROUP_ID - means to use the Sparkplug Group ID for the channel name on incoming data
# EDGE_ID - means to use the Sparkplug Group ID and the Edge Node ID for the channel name on incoming data
# DEVICE_ID - means to use the Sparkplug Group ID, Edge Node ID, and Device ID for the channel name on incoming data
snowflake_streaming_channel_scheme = EDGE_ID

# Snowflake streaming connection properties - A custom channel name for the connection (e.g. MyChannel)
# If this is left blank/empty, Channel names of the Sparkplug Group ID will be used instead of a single channel
# snowflake_streaming_channel_name =

# Snowflake streaming connection properties - The Table name for Sparkplug data associated with the Database and Schema already provisioned in the Snowflake account (e.g. MyTable)
snowflake_streaming_table_name = SPARKPLUG_RAW

# Snowflake streaming connection properties - The Table name for RAW MQTT data.
snowflake_mqtt_streaming_table_name = MQTT_RAW

# Maximum number of streaming channels for RAW MQTT messages
# snowflake_max_mqtt_streaming_channels =

# Snowflake notify connection properties - The Database name associated with the connection that is already provisioned in the Snowflake account (e.g. MyDb)
snowflake_notify_db_name = cl_bridge_node_db

# Snowflake notify connection properties - The Schema name associated with the Database already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_schema_name = stage_db

# Snowflake notify connection properties - The Warehouse name associated with the notifications already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_warehouse_name = cl_bridge_ingest_wh

# Whether or not to create and update IBSNOW informational tracking metrics
# ibsnow_metrics_enabled = true

# The Sparkplug Group ID to use for IBSNOW asset names
ibsnow_metrics_sparkplug_group_id = IBSNOW

# The 'Bridge Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_bridge_info_sparkplug_edge_node_id = Bridge Info

# The 'Edge Node Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_edge_node_info_sparkplug_edge_node_id = Edge Node Info

# The 'MQTT Client Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_mqtt_client_info_sparkplug_edge_node_id = MQTT Client Info

# Whether or not to send notification tasks to Snowflake based on incoming Sparkplug events
snowflake_notify_task_enabled = true

# The number of threads to use for BIRTH handling in Snowflake
# snowflake_notify_task_birth_thread_count = 100

# The number of times to retry the NotifyIngest task on failure
snowflake_notify_nbirth_retries = 10

# The number of milliseconds to delay after receiving an NBIRTH before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_nbirth_task_delay = 15000

# The number of milliseconds to delay after receiving a DBIRTH or DATA message before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_data_task_delay = 5000

# NotifyTimerTask keep alive in seconds. This setting allows inactive threads to be killed after the keep-alive expires.
snowflake_notify_task_keep_alive = 60

# Enable performance metrics such as 'NotifyIngest thread pool info' and 'Message Inflow Rates' per EdgeNode or topic.
snowflake_enable_performance_metrics = true
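
Because each MQTT Server definition is keyed by its index, a second broker is added by repeating the same properties with the next index. The fragment below is a minimal sketch of what a hypothetical second server definition could look like; the endpoint, name, and subscription are illustrative placeholders, not defaults shipped with IoT Bridge for Snowflake.

Code Block
languagebash
titleExample: a second MQTT Server definition (hypothetical values)
# A hypothetical second server - the index increments from 1 to 2
mqtt_server_url.2 = ssl://REPLACE_WITH_SECOND_MQTT_SERVER_ENDPOINT:8883
mqtt_server_name.2 = My Second MQTT Server
mqtt_sparkplug_subscriptions.2 = spBv1.0/Group2/#
#mqtt_keepalive_timeout.2 = 30
#mqtt_verify_hostname.2 = true
#mqtt_username.2 =
#mqtt_password.2 =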

Configuration Examples

Cirrus Link's Chariot MQTT Server using a real signed TLS certificate

If you are using Cirrus Link's Chariot MQTT Server with a real signed TLS certificate, your configuration file should look similar to the one below.

Excerpt


Code Block
languagebash
titleChariot MQTT Server using a real signed TLS certificate
# The IBSNOW instance friendly name. If omitted, it will become 'IBSNOW-vm-instance-id'
#ibsnow_instance_name = 

# The Cloud region the IoT Bridge for Snowflake instance is in
# ibsnow_cloud_region = us-east-1

# MQTT Server definitions. IoT Bridge for Snowflake supports multiple MQTT Servers. Each definition must include an 'index' as shown
# below represented by 'X'. The first should begin with 1 and each additional server definition should have an index of 1 greater
# than the previous.
# mqtt_server_url.X                     # The MQTT Server URL
# mqtt_server_name.X                    # The MQTT Server name
# mqtt_username.X                       # The MQTT username (if required by the MQTT Server)
# mqtt_password.X                       # The MQTT password (if required by the MQTT Server)
# mqtt_keepalive_timeout.X              # The MQTT keep-alive timeout in seconds
# mqtt_ca_cert_chain_path.X             # The path to the TLS Certificate Authority certificate chain
# mqtt_client_cert_path.X               # The path to the TLS certificate
# mqtt_client_private_key_path.X        # The path to the TLS private key
# mqtt_client_private_key_password.X    # The TLS private key password
# mqtt_verify_hostname.X                # Whether or not to verify the hostname against the server certificate
# mqtt_client_id.X                      # The Client ID of the MQTT Client
# mqtt_sparkplug_subscriptions.X        # The Sparkplug subscriptions to issue when connecting to the MQTT Server.
                                        # By default this is spBv1.0/# but can be scoped more narrowly (e.g. spBv1.0/Group1/#)
                                        # It can also be a comma separated list (e.g. spBv1.0/Group1/#,spBv1.0/Group2/#)

mqtt_server_url.1 = ssl://chariot.mycompany.com:8883
mqtt_server_name.1 = Chariot MQTT Server

# Enable Snowflake Sparkplug MQTT Application
snowflake_application_enabled = true

# Enable Snowflake Raw MQTT Application
snowflake_mqtt_application_enabled = true

# Comma separated list of Sparkplug subscriptions
mqtt_sparkplug_subscriptions.1 = spBv1.0/#

# Comma separated list of MQTT subscription topic:QoS pairs (e.g. a/#:0,b/#:0)
# mqtt_subscriptions.1 =

#mqtt_keepalive_timeout.1 = 30
#mqtt_verify_hostname.1 = true
mqtt_username.1 = admin
mqtt_password.1 = changeme
#mqtt_ca_cert_chain_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_cert_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_private_key_path.1 = /opt/ibsnow/conf/certs/
#mqtt_client_private_key_password.1 =
#mqtt_client_id.1 =

# The Sparkplug sequence reordering timeout in milliseconds
sequence_reordering_timeout = 5000

# Whether or not to block auto-rebirth requests
#block_auto_rebirth = false

# The primary host ID if this is the acting primary host
#primary_host_id =

# Snowflake streaming connection properties - A custom client name for the connection (e.g. MyClient)
#snowflake_streaming_client_name = IBSNOWClient

# Maximum number of rows to insert into the snowflake_streaming_table at once. Set to 0 for no limit.
snowflake_max_streaming_insert_batch_size = 0

# Maximum number of rows to insert into the snowflake_mqtt_streaming_table at once. Set to 0 for no limit.  
snowflake_max_mqtt_streaming_insert_batch_size = 0

# Do raw MQTT inserts one topic at a time
snowflake_topic_based_mqtt_streaming_insert = false

# Ingest task execution period (in seconds)
snowflake_streaming_ingest_task_period = 1

# Raw MQTT ingest task execution period (in seconds) 
snowflake_mqtt_streaming_ingest_task_period = 1


# Snowflake streaming connection properties - The scheme to use for channels and their names
# This MUST be one of the following: STATIC, GROUP_ID, EDGE_ID
# STATIC - means to use a single channel. If using this mode, the snowflake_streaming_channel_name
# GROUP_ID - means to use the Sparkplug Group ID for the channel name on incoming data
# EDGE_ID - means to use the Sparkplug Group ID and the Edge Node ID for the channel name on incoming data
# DEVICE_ID - means to use the Sparkplug Group ID, Edge Node ID, and Device ID for the channel name on incoming data
snowflake_streaming_channel_scheme = EDGE_ID

# Snowflake streaming connection properties - A custom channel name for the connection (e.g. MyChannel)
# If this is left blank/empty, Channel names of the Sparkplug Group ID will be used instead of a single channel
# snowflake_streaming_channel_name =

# Snowflake streaming connection properties - The Table name for Sparkplug data associated with the Database and Schema already provisioned in the Snowflake account (e.g. MyTable)
snowflake_streaming_table_name = SPARKPLUG_RAW

# Snowflake streaming connection properties - The Table name for RAW MQTT data.
snowflake_mqtt_streaming_table_name = MQTT_RAW

# Maximum number of streaming channels for RAW MQTT messages
# snowflake_max_mqtt_streaming_channels =

# Snowflake notify connection properties - The Database name associated with the connection that is already provisioned in the Snowflake account (e.g. MyDb)
snowflake_notify_db_name = cl_bridge_node_db

# Snowflake notify connection properties - The Schema name associated with the Database already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_schema_name = stage_db

# Snowflake notify connection properties - The Warehouse name associated with the notifications already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_warehouse_name = cl_bridge_ingest_wh

# Whether or not to create and update IBSNOW informational tracking metrics
# ibsnow_metrics_enabled = true

# The Sparkplug Group ID to use for IBSNOW asset names
ibsnow_metrics_sparkplug_group_id = IBSNOW

# The 'Bridge Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_bridge_info_sparkplug_edge_node_id = Bridge Info

# The 'Edge Node Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_edge_node_info_sparkplug_edge_node_id = Edge Node Info

# The 'MQTT Client Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_mqtt_client_info_sparkplug_edge_node_id = MQTT Client Info

# Whether or not to send notification tasks to Snowflake based on incoming Sparkplug events
snowflake_notify_task_enabled = true

# The number of threads to use for BIRTH handling in Snowflake
# snowflake_notify_task_birth_thread_count = 100

# The number of times to retry the NotifyIngest task on failure 
snowflake_notify_nbirth_retries = 10

# The number of milliseconds to delay after receiving an NBIRTH before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_nbirth_task_delay = 15000

# The number of milliseconds to delay after receiving a DBIRTH or DATA message before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_data_task_delay = 5000

# NotifyTimerTask keep alive in seconds. This setting allows inactive threads to be killed after the keep-alive expires.
snowflake_notify_task_keep_alive = 60

# Enable performance metrics such as 'NotifyIngest thread pool info' and 'Message Inflow Rates' per EdgeNode or topic.
snowflake_enable_performance_metrics = true
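
Since this example relies on a real signed certificate and hostname verification, it can be worth confirming what the Chariot server presents on port 8883 before starting the bridge. The commands below are a minimal sketch using openssl, which is not part of IoT Bridge for Snowflake; chariot.mycompany.com is the placeholder hostname from the example above.

Code Block
languagebash
titleOptional check of the Chariot TLS certificate with openssl
# Print the subject, issuer and expiry of the certificate served on the MQTT TLS port
openssl s_client -connect chariot.mycompany.com:8883 -servername chariot.mycompany.com </dev/null 2>/dev/null \
  | openssl x509 -noout -subject -issuer -enddate

# Show the full chain if the CA needs to be added to mqtt_ca_cert_chain_path.1
openssl s_client -connect chariot.mycompany.com:8883 -servername chariot.mycompany.com -showcerts </dev/null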




AWS IoT Core

If you are using AWS IoT Core, your configuration file should look similar to the one below.

Code Block
languagebash
titleAWS IoT Core

# The IBSNOW instance friendly name. If omitted, it will become 'IBSNOW-vm-instance-id'
#ibsnow_instance_name = 

# The Cloud region the IoT Bridge for Snowflake instance is in
# ibsnow_cloud_region = us-east-1

# MQTT Server definitions. IoT Bridge for Snowflake supports multiple MQTT Servers. Each definition must include an 'index' as shown
# below represented by 'X'. The first should begin with 1 and each additional server definition should have an index of 1 greater
# than the previous.
# mqtt_server_url.X                     # The MQTT Server URL
# mqtt_server_name.X                    # The MQTT Server name
# mqtt_username.X                       # The MQTT username (if required by the MQTT Server)
# mqtt_password.X                       # The MQTT password (if required by the MQTT Server)
# mqtt_keepalive_timeout.X              # The MQTT keep-alive timeout in seconds
# mqtt_ca_cert_chain_path.X             # The path to the TLS Certificate Authority certificate chain
# mqtt_client_cert_path.X               # The path to the TLS certificate
# mqtt_client_private_key_path.X        # The path to the TLS private key
# mqtt_client_private_key_password.X    # The TLS private key password
# mqtt_verify_hostname.X                # Whether or not to verify the hostname against the server certificate
# mqtt_client_id.X                      # The Client ID of the MQTT Client
# mqtt_sparkplug_subscriptions.X        # The Sparkplug subscriptions to issue when connecting to the MQTT Server.
                                        # By default this is spBv1.0/# but can be scoped more narrowly (e.g. spBv1.0/Group1/#)
                                        # It can also be a comma separated list (e.g. spBv1.0/Group1/#,spBv1.0/Group2/#)

mqtt_server_url.1 = ssl://b9ffnzzzzzzzz-ats.iot.us-east-1.amazonaws.com:8883
mqtt_server_name.1 = AWS IoT Core MQTT Server

# Enable Snowflake Sparkplug MQTT Application
snowflake_application_enabled = true

# Enable Snowflake Raw MQTT Application
snowflake_mqtt_application_enabled = true

# Comma separated list of Sparkplug subscriptions
mqtt_sparkplug_subscriptions.1 = spBv1.0/#

# Comma separated list of MQTT subscription topic:QoS pairs (e.g. a/#:0,b/#:0)
# mqtt_subscriptions.1 =

#mqtt_keepalive_timeout.1 = 30
#mqtt_verify_hostname.1 = true
#mqtt_username.1 =
#mqtt_password.1 =
mqtt_ca_cert_chain_path.1 = /opt/ibsnow/conf/certs/AmazonRootCA1.pem
mqtt_client_cert_path.1 = /opt/ibsnow/conf/certs/72d382zzzz.cert.pem
mqtt_client_private_key_path.1 = /opt/ibsnow/conf/certs/72d382zzzz.private.key
#mqtt_client_private_key_password.1 =
#mqtt_client_id.1 =

# The Sparkplug sequence reordering timeout in milliseconds
sequence_reordering_timeout = 5000

# Whether or not to block auto-rebirth requests
#block_auto_rebirth = false

# The primary host ID if this is the acting primary host
#primary_host_id =

# Snowflake streaming connection properties - A custom client name for the connection (e.g. MyClient)
#snowflake_streaming_client_name = IBSNOWClient

# Maximum number of rows to insert into the snowflake_streaming_table at once. Set to 0 for no limit.
snowflake_max_streaming_insert_batch_size = 0

# Maximum number of rows to insert into the snowflake_mqtt_streaming_table at once. Set to 0 for no limit.  
snowflake_max_mqtt_streaming_insert_batch_size = 0

# Do raw MQTT inserts one topic at a time
snowflake_topic_based_mqtt_streaming_insert = false

# Ingest task execution period (in seconds)
snowflake_streaming_ingest_task_period = 1

# Raw MQTT ingest task execution period (in seconds) 
snowflake_mqtt_streaming_ingest_task_period = 1

# Snowflake streaming connection properties - The scheme to use for channels and their names
# This MUST be one of the following: STATIC, GROUP_ID, EDGE_ID
# STATIC - means to use a single channel. If using this mode, the snowflake_streaming_channel_name
# GROUP_ID - means to use the Sparkplug Group ID for the channel name on incoming data
# EDGE_ID - means to use the Sparkplug Group ID and the Edge Node ID for the channel name on incoming data
# DEVICE_ID - means to use the Sparkplug Group ID, Edge Node ID, and Device ID for the channel name on incoming data
snowflake_streaming_channel_scheme = EDGE_ID

# Snowflake streaming connection properties - A custom channel name for the connection (e.g. MyChannel)
# If this is left blank/empty, Channel names of the Sparkplug Group ID will be used instead of a single channel
# snowflake_streaming_channel_name =

# Snowflake streaming connection properties - The Table name for Sparkplug data associated with the Database and Schema already provisioned in the Snowflake account (e.g. MyTable)
snowflake_streaming_table_name = SPARKPLUG_RAW

# Snowflake streaming connection properties - The Table name for RAW MQTT data.
snowflake_mqtt_streaming_table_name = MQTT_RAW

# Maximum number of streaming channels for RAW MQTT messages
# snowflake_max_mqtt_streaming_channels =

# Snowflake notify connection properties - The Database name associated with the connection that is already provisioned in the Snowflake account (e.g. MyDb)
snowflake_notify_db_name = cl_bridge_node_db

# Snowflake notify connection properties - The Schema name associated with the Database already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_schema_name = stage_db

# Snowflake notify connection properties - The Warehouse name associated with the notifications already provisioned in the Snowflake account (e.g. PUBLIC)
snowflake_notify_warehouse_name = cl_bridge_ingest_wh

# Whether or not to create and update IBSNOW informational tracking metrics
# ibsnow_metrics_enabled = true

# The Sparkplug Group ID to use for IBSNOW asset names
ibsnow_metrics_sparkplug_group_id = IBSNOW

# The 'Bridge Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_bridge_info_sparkplug_edge_node_id = Bridge Info

# The 'Edge Node Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_edge_node_info_sparkplug_edge_node_id = Edge Node Info

# The 'MQTT Client Info' Sparkplug Edge Node ID to use for IBSNOW assets
ibsnow_metrics_mqtt_client_info_sparkplug_edge_node_id = MQTT Client Info

# Whether or not to send notification tasks to Snowflake based on incoming Sparkplug events
snowflake_notify_task_enabled = true

# The number of threads to use for BIRTH handling in Snowflake
# snowflake_notify_task_birth_thread_count = 100

# The number of times to retry the NotifyIngest task on failure
snowflake_notify_nbirth_retries = 10

# The number of milliseconds to delay after receiving an NBIRTH before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_nbirth_task_delay = 15000

# The number of milliseconds to delay after receiving a DBIRTH or DATA message before notifying Snowflake over the event (requires snowflake_notify_task_enabled is true)
snowflake_notify_data_task_delay = 5000

# NotifyTimerTask keep alive in seconds. This setting allows inactive threads to be killed after the keep-alive expires.
snowflake_notify_task_keep_alive = 60

# Enable performance metrics such as 'NotifyIngest thread pool info' and 'Message Inflow Rates' per EdgeNode or topic.
snowflake_enable_performance_metrics = true
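
The endpoint and certificate files in this example come from the AWS IoT Core thing registration, and the 72d382zzzz names are placeholders. As a minimal sketch, assuming the AWS CLI and openssl are installed and credentials are configured, the values can be sanity-checked before starting the bridge; the root CA URL below is the Amazon Trust Services repository location.

Code Block
languagebash
titleOptional checks for the AWS IoT Core endpoint and certificates
# Look up the ATS data endpoint used in mqtt_server_url.1
aws iot describe-endpoint --endpoint-type iot:Data-ATS

# Fetch the root CA referenced by mqtt_ca_cert_chain_path.1
curl -o /opt/ibsnow/conf/certs/AmazonRootCA1.pem https://www.amazontrust.com/repository/AmazonRootCA1.pem

# Confirm the device certificate and private key are a matching pair (RSA keys)
openssl x509 -noout -modulus -in /opt/ibsnow/conf/certs/72d382zzzz.cert.pem | openssl md5
openssl rsa -noout -modulus -in /opt/ibsnow/conf/certs/72d382zzzz.private.key | openssl md5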