Skip to content

Configure CShelve from a Python dictionary

open_from_dict() lets you pass configuration directly as a Python dictionary instead of a .ini file. This is ideal when you generate settings at runtime, pull secrets from a vault, or run in environments where writing to disk is inconvenient.


  • No temp files – Great for serverless or read-only filesystems.
  • Dynamic config – Build settings from environment variables or secret managers.
  • Same structure – Mirrors the .ini format, so examples are easy to translate.

import os
import cshelve

# Azure Blob Storage with passwordless (Entra ID) authentication.
# The dict mirrors the .ini layout: top-level keys are sections,
# nested keys are the section's key/value pairs.
config = {
    "default": {
        "provider": "azure-blob",
        "auth_type": "passwordless",
        # Fixed: a comma was missing after this entry, which made the
        # dict literal a syntax error (implicit string concatenation).
        "account_url": "https://<account>.blob.core.windows.net",
        "container_name": "standard",
    },
}

# open_from_dict() accepts the same flags/options as cshelve.open().
with cshelve.open_from_dict(config) as db:
    db["hello"] = "world"
    print(db["hello"])

cshelve.open_from_dict(config: dict, *args, **kwargs)
  • config – A dictionary mirroring the .ini structure (sections + key/value pairs).
  • *args, **kwargs – Same flags and options supported by cshelve.open().

import cshelve  # added so the snippet runs standalone

# Minimal Azure Blob Storage configuration: a single "default" section
# identifying the provider, auth mode, account, and container.
config = {
    "default": {
        "provider": "azure-blob",
        "auth_type": "passwordless",
        "account_url": "https://myaccount.blob.core.windows.net",
        "container_name": "mycontainer",
    }
}

with cshelve.open_from_dict(config) as db:
    db["key"] = "value"
import os
import cshelve  # added so the snippet runs standalone

# Azure Blob Storage authenticated via a connection string read from an
# environment variable named by "environment_key". The optional "logging"
# section tunes cshelve's own diagnostics (booleans are accepted directly).
config = {
    "default": {
        "provider": "azure-blob",
        "auth_type": "connection_string",
        "environment_key": "AZURE_STORAGE_CONNECTION_STRING",
        "container_name": "standard",
    },
    "logging": {"http": True, "credentials": False, "level": "INFO"},
}

# The variable must exist before open_from_dict() resolves it.
os.environ["AZURE_STORAGE_CONNECTION_STRING"] = "<your-conn-string>"

with cshelve.open_from_dict(config) as db:
    db["config_type"] = "dict"
import os
import cshelve  # added so the snippet runs standalone

# AWS S3 with access-key authentication. "$VAR" values are placeholders
# that cshelve expands from the environment.
config = {
    "default": {
        "provider": "aws-s3",
        "auth_type": "access_key",
        "bucket_name": "mybucket",
        "key_id": "$AWS_KEY_ID",
        "key_secret": "$AWS_KEY_SECRET",
    }
}

# Set the referenced variables before opening, so the "$..." placeholders
# can be resolved.
os.environ["AWS_KEY_ID"] = "AKIA...snip..."
os.environ["AWS_KEY_SECRET"] = "secret...snip..."

with cshelve.open_from_dict(config) as db:
    db["cloud_key"] = "Stored in S3!"
import os
import cshelve  # added so the snippet runs standalone

# SFTP backend with password authentication. Connection details are
# supplied via "$VAR" placeholders resolved from the environment.
config = {
    "default": {
        "provider": "sftp",
        "hostname": "$SFTP_PASSWORD_HOSTNAME",
        "username": "$SFTP_USERNAME",
        "password": "$SFTP_PASSWORD",
        "auth_type": "password",
    },
    "provider_params": {
        # NOTE(review): "myuser" looks copy-pasted from the username above;
        # confirm this is really the intended remote directory path.
        "remote_path": "myuser"
    }
}

# Populate the placeholders before opening the store.
os.environ.update({
    "SFTP_PASSWORD_HOSTNAME": "sftp.example.com",
    "SFTP_USERNAME": "myuser",
    "SFTP_PASSWORD": "mypassword",
})

with cshelve.open_from_dict(config) as db:
    db["local_backup"] = "Stored via SFTP"

Multi-Provider with Provider-Specific Compression

Section titled “Multi-Provider with Provider-Specific Compression”

Store data to both local filesystem and AWS S3 simultaneously, with different compression levels:

import os
import cshelve  # added so the snippet runs standalone

# Replicate every write to two backends (local filesystem + AWS S3),
# each with its own compression level. Section names "filesystem" and
# "s3" are referenced by the comma-separated "providers" list; a
# "<name>.compression" section applies only to that provider.
config = {
    "default": {
        "providers": "filesystem, s3",
        "provider_routing": "all",  # write to every provider
    },
    "filesystem": {
        "provider": "filesystem",
        "folder_path": "/data/local",
    },
    "filesystem.compression": {
        "algorithm": "zlib",
        "level": "6",  # moderate compression: faster local I/O
    },
    "s3": {
        "provider": "aws-s3",
        "bucket_name": "my-backup-bucket",
        "auth_type": "access_key",
        "key_id": "$AWS_ACCESS_KEY_ID",
        "key_secret": "$AWS_SECRET_ACCESS_KEY",
    },
    "s3.compression": {
        "algorithm": "zlib",
        "level": "9",  # maximum compression: cheaper bandwidth/storage
    },
}

# Resolve the "$..." credential placeholders before opening.
os.environ.update({
    "AWS_ACCESS_KEY_ID": "AKIA...snip...",
    "AWS_SECRET_ACCESS_KEY": "secret...snip...",
})

with cshelve.open_from_dict(config) as db:
    # Written to both filesystem (level 6) and S3 (level 9)
    db["backup_data"] = "Replicated to local and cloud"
    # Read from first provider (filesystem) - fast access
    print(db["backup_data"])

In this example:

  • Filesystem uses compression level 6 (moderate – faster I/O)
  • AWS S3 uses compression level 9 (maximum – cheaper bandwidth)
  • All writes are replicated to both providers
  • Reads only use the filesystem (first provider)