Background
In the previous article I wrote about how to configure your application using Sitri, but I skipped over local development. You will agree that deploying a Vault instance locally is not very convenient, and keeping a local config in the shared Vault is inconvenient too, doubly so when several people work on the project.
Fortunately, Sitri ships with a local mode that reads settings from a plain json file, so setting up local development takes very little code.
So, let's agree that local_mode is active when ENV = "local" :) Next, let's slightly edit our provider_config.py and add a BaseConfig class there, from which the Config classes of our settings will inherit. This way we avoid duplicating the local mode setup in every settings class.
import hvac

from sitri.providers.contrib.system import SystemConfigProvider
from sitri.providers.contrib.vault import VaultKVConfigProvider
from sitri.settings.contrib.vault import VaultKVSettings

# environment variables are read with the SUPERAPP_ prefix
configurator = SystemConfigProvider(prefix="superapp")
ENV = configurator.get("env")

is_local_mode = ENV == "local"
local_mode_file_path = configurator.get("local_mode_file_path")


def vault_client_factory() -> hvac.Client:
    client = hvac.Client(url=configurator.get("vault_api"))

    client.auth_approle(
        role_id=configurator.get("role_id"),
        secret_id=configurator.get("secret_id"),
    )

    return client


provider = VaultKVConfigProvider(
    vault_connector=vault_client_factory,
    mount_point=f"{configurator.get('app_name')}/{ENV}",
)


# base Config class for all settings: local mode is configured once here
class BaseConfig(VaultKVSettings.VaultKVSettingsConfig):
    provider = provider
    local_mode = is_local_mode
    local_provider_args = {"json_path": local_mode_file_path}
In local_provider_args we specify the arguments for building a JsonConfigProvider instance; these arguments are checked against the provider, so only pass ones it actually accepts. Alternatively, you can construct the local provider yourself and put the instance in the local_provider field.
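If you would rather build the local provider by hand, a minimal sketch could look like this (I am assuming JsonConfigProvider is importable from sitri.providers.contrib.json; check the path against your Sitri version):

from sitri.providers.contrib.json import JsonConfigProvider  # import path is an assumption

# build the local provider only in local mode: in other environments
# the json file may simply not exist
local_json_provider = (
    JsonConfigProvider(json_path=local_mode_file_path) if is_local_mode else None
)


class BaseConfig(VaultKVSettings.VaultKVSettingsConfig):
    provider = provider
    local_mode = is_local_mode
    local_provider = local_json_provider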
Now the Config classes of our settings can simply inherit from this base class. For example, the Kafka settings class will now look like this:
from typing import Any, Dict

from pydantic import Field

from sitri.settings.contrib.vault import VaultKVSettings

from superapp.config.provider_config import BaseConfig, configurator


class KafkaSettings(VaultKVSettings):
    # vault_secret_key is only needed when the key inside the secret
    # differs from the field name
    mechanism: str = Field(..., vault_secret_key="auth_mechanism")
    brokers: str = Field(...)
    auth_data: Dict[str, Any] = Field(...)

    class Config(BaseConfig):
        default_secret_path = "kafka"
        default_mount_point = f"{configurator.get('app_name')}/common"
        local_mode_path_prefix = "kafka"
As you can see, the changes are minimal. We set local_mode_path_prefix so that the structure of the json file mirrors the structure of the settings in Vault. With that, the json file with our local settings will look roughly like this:
{
    "db": {
        "host": "testhost",
        "password": "testpassword",
        "port": 1234,
        "user": "testuser"
    },
    "faust": {
        "agents": {
            "X": {
                "concurrency": 2,
                "partitions": 5
            }
        },
        "app_name": "superapp-workers",
        "default_concurrency": 5,
        "default_partitions_count": 10
    },
    "kafka": {
        "auth_data": {
            "password": "testpassword",
            "username": "testuser"
        },
        "brokers": "kafka://test",
        "mechanism": "SASL_PLAINTEXT"
    }
}
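To illustrate, once SUPERAPP_ENV=local and SUPERAPP_LOCAL_MODE_FILE_PATH point at this file, KafkaSettings resolves its fields under the "kafka" key. A hypothetical quick check (the superapp.config.kafka_settings module path is my assumption):

import os

# set the variables before importing the config, since
# provider_config.py reads them at import time
os.environ["SUPERAPP_ENV"] = "local"
os.environ["SUPERAPP_LOCAL_MODE_FILE_PATH"] = "./config.json"
os.environ["SUPERAPP_APP_NAME"] = "superapp"

from superapp.config.kafka_settings import KafkaSettings  # assumed path

print(KafkaSettings().brokers)  # expected: kafka://test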
Everything works the other way around too: if ENV is not "local", the values are pulled from Vault as before. To check that the whole thing runs, let's rename main.py to __main__.py so the application can be started as a module, and launch it via docker-compose.
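The entrypoint that produces the output shown below can be a minimal sketch along these lines; DBSettings and FaustSettings are assumed to be defined the same way as KafkaSettings, and the module paths are mine:

from pydantic import BaseModel

# assumed module paths; DBSettings and FaustSettings mirror KafkaSettings
from superapp.config.db_settings import DBSettings
from superapp.config.faust_settings import FaustSettings
from superapp.config.kafka_settings import KafkaSettings


class AppSettings(BaseModel):
    db: DBSettings = DBSettings()
    faust: FaustSettings = FaustSettings()
    kafka: KafkaSettings = KafkaSettings()


config = AppSettings()

print(config)
print(config.dict())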
Let's start with a simple Dockerfile:
FROM python:3.8.3-buster

ENV PYTHONUNBUFFERED=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    POETRY_VIRTUALENVS_IN_PROJECT=false \
    POETRY_NO_INTERACTION=1

RUN pip install poetry

RUN mkdir /superapp/
WORKDIR /superapp/

COPY ./pyproject.toml ./poetry.lock /superapp/

RUN poetry install --no-ansi

WORKDIR /
As you can see, nothing fancy: just installing the dependencies with poetry.
Next, the env file with the variables for local mode:
SUPERAPP_ENV=local
SUPERAPP_LOCAL_MODE_FILE_PATH=/config.json
SUPERAPP_APP_NAME=superapp
As you can see, nothing extra is needed: no variables for Vault access, because in local mode the application never talks to Vault.
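As a reminder of how these variables reach the code: SystemConfigProvider prefixes every key it is asked for, so the three lines above map directly onto the configurator calls in provider_config.py:

from sitri.providers.contrib.system import SystemConfigProvider

configurator = SystemConfigProvider(prefix="superapp")

configurator.get("env")                   # reads SUPERAPP_ENV -> "local"
configurator.get("local_mode_file_path")  # reads SUPERAPP_LOCAL_MODE_FILE_PATH
configurator.get("app_name")              # reads SUPERAPP_APP_NAME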
And the last piece, the docker-compose.yml file itself:
# docker-compose config for local development
version: '3'

services:
  superapp:
    command: python3 -m superapp
    restart: always
    build:
      context: ./
      dockerfile: Dockerfile
    volumes:
      - ./superapp:/superapp
      - ./config.json:/config.json
    env_file:
      - .env.local
As you can see, everything is simple here too. We mount our json file into the container's root, at the path set in the environment variable above.
Now, launch:
docker-compose up
Creating article_sitri_vault_pydantic_superapp_1 ... done
Attaching to article_sitri_vault_pydantic_superapp_1
superapp_1 | db=DBSettings(user='testuser', password='testpassword', host='testhost', port=1234) faust=FaustSettings(app_name='superapp-workers', default_partitions_count=10, default_concurrency=5, agents={'X': AgentConfig(partitions=5, concurrency=2)}) kafka=KafkaSettings(mechanism='SASL_PLAINTEXT', brokers='kafka://test', auth_data={'password': 'testpassword', 'username': 'testuser'})
superapp_1 | {'db': {'user': 'testuser', 'password': 'testpassword', 'host': 'testhost', 'port': 1234}, 'faust': {'app_name': 'superapp-workers', 'default_partitions_count': 10, 'default_concurrency': 5, 'agents': {'X': {'partitions': 5, 'concurrency': 2}}}, 'kafka': {'mechanism': 'SASL_PLAINTEXT', 'brokers': 'kafka://test', 'auth_data': {'password': 'testpassword', 'username': 'testuser'}}}
As you can see, everything started successfully: the information from our json file passed all the checks and became the settings for the local version of the application, woohoo!
I put the code for this article in a separate branch, so you can check it out, poke around, and test everything yourself :)
branch