Configuration reference#

pydantic settings ook.config.Configuration#

Configuration for ook.

Parameters:

Show JSON schema
{
   "title": "Configuration",
   "description": "Configuration for ook.",
   "type": "object",
   "properties": {
      "name": {
         "title": "Name",
         "description": "The application's name",
         "default": "ook",
         "env": "SAFIR_NAME",
         "env_names": "{'safir_name'}",
         "type": "string"
      },
      "profile": {
         "description": "Application logging profile: 'development' or 'production'.",
         "default": "production",
         "env": "SAFIR_PROFILE",
         "env_names": "{'safir_profile'}",
         "allOf": [
            {
               "$ref": "#/definitions/Profile"
            }
         ]
      },
      "log_level": {
         "title": "Log level of the application's logger",
         "default": "INFO",
         "env": "SAFIR_LOG_LEVEL",
         "env_names": "{'safir_log_level'}",
         "allOf": [
            {
               "$ref": "#/definitions/LogLevel"
            }
         ]
      },
      "path_prefix": {
         "title": "API URL path prefix",
         "description": "The URL prefix where the application's externally-accessible endpoints are hosted.",
         "default": "/ook",
         "env": "SAFIR_PATH_PREFIX",
         "env_names": "{'safir_path_prefix'}",
         "type": "string"
      },
      "environment_url": {
         "title": "Base URL of the environment",
         "description": "The base URL of the environment where the application is hosted.",
         "env": "SAFIR_ENVIRONMENT_URL",
         "env_names": "{'safir_environment_url'}",
         "minLength": 1,
         "maxLength": 65536,
         "format": "uri",
         "type": "string"
      },
      "kafka": {
         "title": "Kafka",
         "description": "Kafka connection configuration.",
         "env_names": "{'kafka'}",
         "allOf": [
            {
               "$ref": "#/definitions/KafkaConnectionSettings"
            }
         ]
      },
      "registry_url": {
         "title": "Schema Registry URL",
         "env": "OOK_REGISTRY_URL",
         "env_names": "{'ook_registry_url'}",
         "minLength": 1,
         "maxLength": 65536,
         "format": "uri",
         "type": "string"
      },
      "subject_suffix": {
         "title": "Schema subject name suffix",
         "description": "Suffix to add to Schema Registry suffix names. This is useful when deploying for testing/staging and you do not want to affect the production subject and its compatibility lineage.",
         "default": "",
         "env": "OOK_SUBJECT_SUFFIX",
         "env_names": "{'ook_subject_suffix'}",
         "type": "string"
      },
      "subject_compatibility": {
         "title": "Schema subject compatibility",
         "description": "Compatibility level to apply to Schema Registry subjects. Use NONE for testing and development, but prefer FORWARD_TRANSITIVE for production.",
         "default": "FORWARD_TRANSITIVE",
         "env": "OOK_SUBJECT_COMPATIBILITY",
         "env_names": "{'ook_subject_compatibility'}",
         "type": "string"
      },
      "enable_kafka_consumer": {
         "title": "Enable Kafka Consumer",
         "description": "Enable Kafka consumer.",
         "default": true,
         "env": "OOK_ENABLE_CONSUMER",
         "env_names": "{'ook_enable_consumer'}",
         "type": "boolean"
      },
      "ingest_kafka_topic": {
         "title": "Ingest Kafka Topic",
         "description": "The name of the Kafka topic for the ingest queue.",
         "default": "ook.ingest",
         "env": "OOK_INGEST_KAFKA_TOPIC",
         "env_names": "{'ook_ingest_kafka_topic'}",
         "type": "string"
      },
      "kafka_consumer_group_id": {
         "title": "Kafka Consumer Group Id",
         "description": "Kafka consumer group ID.",
         "default": "ook",
         "env": "OOK_GROUP_ID",
         "env_names": "{'ook_group_id'}",
         "type": "string"
      },
      "algolia_app_id": {
         "title": "Algolia App Id",
         "description": "The Algolia app ID",
         "env": "ALGOLIA_APP_ID",
         "env_names": "{'algolia_app_id'}",
         "type": "string"
      },
      "algolia_api_key": {
         "title": "Algolia Api Key",
         "description": "The Algolia API key",
         "env": "ALGOLIA_API_KEY",
         "env_names": "{'algolia_api_key'}",
         "type": "string",
         "writeOnly": true,
         "format": "password"
      },
      "algolia_document_index_name": {
         "title": "Algolia Document Index Name",
         "description": "Name of the Algolia document index",
         "default": "document_dev",
         "env": "ALGOLIA_DOCUMENT_INDEX",
         "env_names": "{'algolia_document_index'}",
         "type": "string"
      },
      "github_app_id": {
         "title": "Github App Id",
         "env": "OOK_GITHUB_APP_ID",
         "env_names": "{'ook_github_app_id'}",
         "type": "string"
      },
      "github_app_private_key": {
         "title": "Github App Private Key",
         "env": "OOK_GITHUB_APP_PRIVATE_KEY",
         "env_names": "{'ook_github_app_private_key'}",
         "type": "string",
         "writeOnly": true,
         "format": "password"
      }
   },
   "required": [
      "environment_url",
      "registry_url",
      "algolia_app_id",
      "algolia_api_key"
   ],
   "additionalProperties": false,
   "definitions": {
      "Profile": {
         "title": "Profile",
         "description": "Logging profile for the application.",
         "enum": [
            "production",
            "development"
         ]
      },
      "LogLevel": {
         "title": "LogLevel",
         "description": "Python logging level.",
         "enum": [
            "DEBUG",
            "INFO",
            "WARNING",
            "ERROR",
            "CRITICAL"
         ]
      },
      "KafkaSecurityProtocol": {
         "title": "KafkaSecurityProtocol",
         "description": "Kafka security protocols understood by aiokafka.",
         "enum": [
            "PLAINTEXT",
            "SSL"
         ],
         "type": "string"
      },
      "KafkaSaslMechanism": {
         "title": "KafkaSaslMechanism",
         "description": "Kafka SASL mechanisms understood by aiokafka.",
         "enum": [
            "PLAIN",
            "SCRAM-SHA-256",
            "SCRAM-SHA-512"
         ],
         "type": "string"
      },
      "KafkaConnectionSettings": {
         "title": "KafkaConnectionSettings",
         "description": "Settings for connecting to Kafka.",
         "type": "object",
         "properties": {
            "bootstrap_servers": {
               "title": "Kafka bootstrap servers",
               "description": "A comma-separated list of Kafka brokers to connect to. This should be a list of hostnames or IP addresses, each optionally followed by a port number, separated by commas. For example: `kafka-1:9092,kafka-2:9092,kafka-3:9092`.",
               "env": "KAFKA_BOOTSTRAP_SERVERS",
               "env_names": "{'kafka_bootstrap_servers'}",
               "type": "string"
            },
            "security_protocol": {
               "description": "The security protocol to use when connecting to Kafka.",
               "default": "PLAINTEXT",
               "env": "KAFKA_SECURITY_PROTOCOL",
               "env_names": "{'kafka_security_protocol'}",
               "allOf": [
                  {
                     "$ref": "#/definitions/KafkaSecurityProtocol"
                  }
               ]
            },
            "cert_temp_dir": {
               "title": "Cert Temp Dir",
               "description": "Temporary writable directory for concatenating certificates.",
               "env": "KAFKA_CERT_TEMP_DIR",
               "env_names": "{'kafka_cert_temp_dir'}",
               "format": "directory-path",
               "type": "string"
            },
            "cluster_ca_path": {
               "title": "Path to CA certificate file",
               "description": "The path to the CA certificate file to use for verifying the broker's certificate. This is only needed if the broker's certificate is not signed by a CA trusted by the operating system.",
               "env": "KAFKA_SSL_CLUSTER_CAFILE",
               "env_names": "{'kafka_ssl_cluster_cafile'}",
               "format": "file-path",
               "type": "string"
            },
            "client_ca_path": {
               "title": "Path to client CA certificate file",
               "description": "The path to the client CA certificate file to use for authentication. This is only needed when the client certificate needs to beconcatenated with the client CA certificate, which is commonfor Strimzi installations.",
               "env": "KAFKA_SSL_CLIENT_CAFILE",
               "env_names": "{'kafka_ssl_client_cafile'}",
               "format": "file-path",
               "type": "string"
            },
            "client_cert_path": {
               "title": "Path to client certificate file",
               "description": "The path to the client certificate file to use for authentication. This is only needed if the broker is configured to require SSL client authentication.",
               "env": "KAFKA_SSL_CLIENT_CERTFILE",
               "env_names": "{'kafka_ssl_client_certfile'}",
               "format": "file-path",
               "type": "string"
            },
            "client_key_path": {
               "title": "Path to client key file",
               "description": "The path to the client key file to use for authentication. This is only needed if the broker is configured to require SSL client authentication.",
               "env": "KAFKA_SSL_CLIENT_KEYFILE",
               "env_names": "{'kafka_ssl_client_keyfile'}",
               "format": "file-path",
               "type": "string"
            },
            "client_key_password": {
               "title": "Password for client key file",
               "description": "The password to use for decrypting the client key file. This is only needed if the client key file is encrypted.",
               "env": "KAFKA_SSL_CLIENT_KEY_PASSWORD",
               "env_names": "{'kafka_ssl_client_key_password'}",
               "type": "string",
               "writeOnly": true,
               "format": "password"
            },
            "sasl_mechanism": {
               "title": "SASL mechanism",
               "description": "The SASL mechanism to use for authentication. This is only needed if SASL authentication is enabled.",
               "default": "PLAIN",
               "env": "KAFKA_SASL_MECHANISM",
               "env_names": "{'kafka_sasl_mechanism'}",
               "allOf": [
                  {
                     "$ref": "#/definitions/KafkaSaslMechanism"
                  }
               ]
            },
            "sasl_username": {
               "title": "SASL username",
               "description": "The username to use for SASL authentication. This is only needed if SASL authentication is enabled.",
               "env": "KAFKA_SASL_USERNAME",
               "env_names": "{'kafka_sasl_username'}",
               "type": "string"
            },
            "sasl_password": {
               "title": "SASL password",
               "description": "The password to use for SASL authentication. This is only needed if SASL authentication is enabled.",
               "env": "KAFKA_SASL_PASSWORD",
               "env_names": "{'kafka_sasl_password'}",
               "type": "string",
               "writeOnly": true,
               "format": "password"
            }
         },
         "required": [
            "bootstrap_servers"
         ],
         "additionalProperties": false
      }
   }
}

Fields:
Validators:
field algolia_api_key: SecretStr [Required]#

The Algolia API key

Constraints:
  • type = string

  • writeOnly = True

  • format = password

field algolia_app_id: str [Required]#

The Algolia app ID

field algolia_document_index_name: str = 'document_dev'#

Name of the Algolia document index

field enable_kafka_consumer: bool = True#

Enable Kafka consumer.

field environment_url: AnyHttpUrl [Required]#

The base URL of the environment where the application is hosted.

Constraints:
  • minLength = 1

  • maxLength = 65536

  • format = uri

field github_app_id: Optional[str] = None#

The GitHub App ID, as determined by GitHub when setting up a GitHub App.

field github_app_private_key: Optional[SecretStr] = None#

The GitHub app private key. See https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/managing-private-keys-for-github-apps

Constraints:
  • type = string

  • writeOnly = True

  • format = password

Validated by:
field ingest_kafka_topic: str = 'ook.ingest'#

The name of the Kafka topic for the ingest queue.

field kafka: KafkaConnectionSettings [Optional]#

Kafka connection configuration.

field kafka_consumer_group_id: str = 'ook'#

Kafka consumer group ID.

field log_level: LogLevel = LogLevel.INFO#
field name: str = 'ook'#

The application’s name

field path_prefix: str = '/ook'#

The URL prefix where the application’s externally-accessible endpoints are hosted.

field profile: Profile = Profile.production#

Application logging profile: ‘development’ or ‘production’.

field registry_url: AnyHttpUrl [Required]#
Constraints:
  • minLength = 1

  • maxLength = 65536

  • format = uri

field subject_compatibility: str = 'FORWARD_TRANSITIVE'#

Compatibility level to apply to Schema Registry subjects. Use NONE for testing and development, but prefer FORWARD_TRANSITIVE for production.

field subject_suffix: str = ''#

Suffix to add to Schema Registry subject names. This is useful when deploying for testing/staging and you do not want to affect the production subject and its compatibility lineage.

validator validate_none_secret  »  github_app_private_key#

Validate a SecretStr setting whose value may be the literal string "None", which is intended to mean None.

This is useful for secrets generated from 1Password or environment variables where the value cannot be null.

Parameters:

v (Optional[SecretStr]) –

Return type:

Optional[SecretStr]

pydantic settings ook.config.KafkaConnectionSettings#

Settings for connecting to Kafka.

Parameters:

Show JSON schema
{
   "title": "KafkaConnectionSettings",
   "description": "Settings for connecting to Kafka.",
   "type": "object",
   "properties": {
      "bootstrap_servers": {
         "title": "Kafka bootstrap servers",
         "description": "A comma-separated list of Kafka brokers to connect to. This should be a list of hostnames or IP addresses, each optionally followed by a port number, separated by commas. For example: `kafka-1:9092,kafka-2:9092,kafka-3:9092`.",
         "env": "KAFKA_BOOTSTRAP_SERVERS",
         "env_names": "{'kafka_bootstrap_servers'}",
         "type": "string"
      },
      "security_protocol": {
         "description": "The security protocol to use when connecting to Kafka.",
         "default": "PLAINTEXT",
         "env": "KAFKA_SECURITY_PROTOCOL",
         "env_names": "{'kafka_security_protocol'}",
         "allOf": [
            {
               "$ref": "#/definitions/KafkaSecurityProtocol"
            }
         ]
      },
      "cert_temp_dir": {
         "title": "Cert Temp Dir",
         "description": "Temporary writable directory for concatenating certificates.",
         "env": "KAFKA_CERT_TEMP_DIR",
         "env_names": "{'kafka_cert_temp_dir'}",
         "format": "directory-path",
         "type": "string"
      },
      "cluster_ca_path": {
         "title": "Path to CA certificate file",
         "description": "The path to the CA certificate file to use for verifying the broker's certificate. This is only needed if the broker's certificate is not signed by a CA trusted by the operating system.",
         "env": "KAFKA_SSL_CLUSTER_CAFILE",
         "env_names": "{'kafka_ssl_cluster_cafile'}",
         "format": "file-path",
         "type": "string"
      },
      "client_ca_path": {
         "title": "Path to client CA certificate file",
         "description": "The path to the client CA certificate file to use for authentication. This is only needed when the client certificate needs to beconcatenated with the client CA certificate, which is commonfor Strimzi installations.",
         "env": "KAFKA_SSL_CLIENT_CAFILE",
         "env_names": "{'kafka_ssl_client_cafile'}",
         "format": "file-path",
         "type": "string"
      },
      "client_cert_path": {
         "title": "Path to client certificate file",
         "description": "The path to the client certificate file to use for authentication. This is only needed if the broker is configured to require SSL client authentication.",
         "env": "KAFKA_SSL_CLIENT_CERTFILE",
         "env_names": "{'kafka_ssl_client_certfile'}",
         "format": "file-path",
         "type": "string"
      },
      "client_key_path": {
         "title": "Path to client key file",
         "description": "The path to the client key file to use for authentication. This is only needed if the broker is configured to require SSL client authentication.",
         "env": "KAFKA_SSL_CLIENT_KEYFILE",
         "env_names": "{'kafka_ssl_client_keyfile'}",
         "format": "file-path",
         "type": "string"
      },
      "client_key_password": {
         "title": "Password for client key file",
         "description": "The password to use for decrypting the client key file. This is only needed if the client key file is encrypted.",
         "env": "KAFKA_SSL_CLIENT_KEY_PASSWORD",
         "env_names": "{'kafka_ssl_client_key_password'}",
         "type": "string",
         "writeOnly": true,
         "format": "password"
      },
      "sasl_mechanism": {
         "title": "SASL mechanism",
         "description": "The SASL mechanism to use for authentication. This is only needed if SASL authentication is enabled.",
         "default": "PLAIN",
         "env": "KAFKA_SASL_MECHANISM",
         "env_names": "{'kafka_sasl_mechanism'}",
         "allOf": [
            {
               "$ref": "#/definitions/KafkaSaslMechanism"
            }
         ]
      },
      "sasl_username": {
         "title": "SASL username",
         "description": "The username to use for SASL authentication. This is only needed if SASL authentication is enabled.",
         "env": "KAFKA_SASL_USERNAME",
         "env_names": "{'kafka_sasl_username'}",
         "type": "string"
      },
      "sasl_password": {
         "title": "SASL password",
         "description": "The password to use for SASL authentication. This is only needed if SASL authentication is enabled.",
         "env": "KAFKA_SASL_PASSWORD",
         "env_names": "{'kafka_sasl_password'}",
         "type": "string",
         "writeOnly": true,
         "format": "password"
      }
   },
   "required": [
      "bootstrap_servers"
   ],
   "additionalProperties": false,
   "definitions": {
      "KafkaSecurityProtocol": {
         "title": "KafkaSecurityProtocol",
         "description": "Kafka security protocols understood by aiokafka.",
         "enum": [
            "PLAINTEXT",
            "SSL"
         ],
         "type": "string"
      },
      "KafkaSaslMechanism": {
         "title": "KafkaSaslMechanism",
         "description": "Kafka SASL mechanisms understood by aiokafka.",
         "enum": [
            "PLAIN",
            "SCRAM-SHA-256",
            "SCRAM-SHA-512"
         ],
         "type": "string"
      }
   }
}

Fields:
field bootstrap_servers: str [Required]#

A comma-separated list of Kafka brokers to connect to. This should be a list of hostnames or IP addresses, each optionally followed by a port number, separated by commas. For example: kafka-1:9092,kafka-2:9092,kafka-3:9092.

field cert_temp_dir: Optional[DirectoryPath] = None#

Temporary writable directory for concatenating certificates.

Constraints:
  • format = directory-path

field client_ca_path: Optional[FilePath] = None#

The path to the client CA certificate file to use for authentication. This is only needed when the client certificate needs to be concatenated with the client CA certificate, which is common for Strimzi installations.

Constraints:
  • format = file-path

field client_cert_path: Optional[FilePath] = None#

The path to the client certificate file to use for authentication. This is only needed if the broker is configured to require SSL client authentication.

Constraints:
  • format = file-path

field client_key_password: Optional[SecretStr] = None#

The password to use for decrypting the client key file. This is only needed if the client key file is encrypted.

Constraints:
  • type = string

  • writeOnly = True

  • format = password

field client_key_path: Optional[FilePath] = None#

The path to the client key file to use for authentication. This is only needed if the broker is configured to require SSL client authentication.

Constraints:
  • format = file-path

field cluster_ca_path: Optional[FilePath] = None#

The path to the CA certificate file to use for verifying the broker’s certificate. This is only needed if the broker’s certificate is not signed by a CA trusted by the operating system.

Constraints:
  • format = file-path

field sasl_mechanism: Optional[KafkaSaslMechanism] = KafkaSaslMechanism.PLAIN#

The SASL mechanism to use for authentication. This is only needed if SASL authentication is enabled.

field sasl_password: Optional[SecretStr] = None#

The password to use for SASL authentication. This is only needed if SASL authentication is enabled.

Constraints:
  • type = string

  • writeOnly = True

  • format = password

field sasl_username: Optional[str] = None#

The username to use for SASL authentication. This is only needed if SASL authentication is enabled.

field security_protocol: KafkaSecurityProtocol = KafkaSecurityProtocol.PLAINTEXT#

The security protocol to use when connecting to Kafka.

property ssl_context: SSLContext | None#

An SSL context for connecting to Kafka with aiokafka, if the Kafka connection is configured to use SSL.

class kafkit.settings.KafkaSecurityProtocol(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Kafka security protocols understood by aiokafka.

class kafkit.settings.KafkaSaslMechanism(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)#

Kafka SASL mechanisms understood by aiokafka.