Compare commits
6 commits
| Author | SHA1 | Date |
|---|---|---|
| | cb99c3911c | |
| | f0a109983b | |
| | 96d25e0e85 | |
| | 04ce86a43e | |
| | 1c2bd11212 | |
| | bc4ea9b101 | |
106 changed files with 544 additions and 5205 deletions
.gitignore (vendored, 9 changed lines)

@@ -24,12 +24,3 @@ dist/
.err
.vscode
/run.sh
ROADMAP.md
aliases/*/src
docs/_build
docs/_static
docs/templates
.coverage

# changes during CD/CI
aliases/*/pyproject.toml

@@ -1,17 +0,0 @@
version: 2

build:
  os: ubuntu-24.04
  tools:
    python: "3.13"

sphinx:
  configuration: docs/conf.py

python:
  install:
    - method: pip
      path: .
      extra_requirements:
        - docs
        - full
CHANGELOG.md (182 changed lines)

@@ -1,190 +1,12 @@
# Changelog

## 0.12.0 "The Color Update"

* All `AuthSrc()` derivatives, deprecated and never used, have been removed.
* New module `mat` adds a shallow reimplementation of `Matrix()` in order to implement matrix multiplication
* Removed the obsolete `configparse` implementation that has been around since 0.3 and shelved since 0.4.
* `color`: added support for conversion from RGB to sRGB, XYZ, OKLab and OKLCH.

## 0.11.2

+ increase test coverage of `validators`

## 0.11.1

+ make `yesno()` accept boolean types

## 0.11.0

+ **Breaking**: sessions returned by `SQLAlchemy()` are now wrapped by default. Restore the original behavior by passing `wrap=False` to the constructor or to `begin()`
+ Slate unused `require_auth()` and derivatives for removal in 0.14.0
+ Add `cb32lencode()`
+ `Snowflake()`: add `.from_cb32()`, `.from_base64()`, `.from_oct()`, `.from_hex()` classmethods
+ Add `SpitText()`
+ Add `Lawyer()` with seven methods
+ Style changes to docstrings

## 0.10.2 and 0.7.11

+ fix incorrect types on `cb32decode()`

## 0.10.1 and 0.7.10

+ `peewee`: fix missing imports

## 0.10.0

+ `peewee`: add `SnowflakeField` class

## 0.9.0

+ Fix to make the experimental `Waiter` usable
+ Suspend `glue()` release indefinitely
+ Add `yesno()`
+ Document validators

## 0.8.2 and 0.7.9

+ `.color`: fix `chalk` not behaving as expected

## 0.8.1 and 0.7.8

+ Fix missing type guard in `unbound_fk()` and `bound_fk()`

## 0.8.0

+ Add `username_column()` to `.sqlalchemy`
+ Improve (experimental) `Waiter`

## 0.7.7

+ Fix imports in `.sqlalchemy`

## 0.7.5

+ Delay release of `FakeModule` to 0.9.0
+ Update dependencies: `.sqlalchemy` now requires `flask_sqlalchemy` regardless of use of Flask

## 0.7.4

+ Delay release of `@glue()`
+ Add docs and some tests to `.iding`
+ Fix a bug in `SiqGen()` that could prevent generation within short amounts of time

## 0.7.3

+ Fixed some broken imports in `.sqlalchemy`
+ Stage `@glue()` for release in ~~0.8.0~~ 0.9.0
+ Add docs to `.sqlalchemy`

## 0.7.2

+ `@future()` can now take a `version=` argument
+ `Waiter()` got marked `@future` indefinitely
+ Stage `username_column()` for release in 0.8.0
+ Explicit support for Python 3.14 (aka python pi)

## 0.7.1

+ Add documentation ([Read The Docs](https://suou.readthedocs.io/))
+ Improved decorator typing

## 0.7.0 "The Lucky Update"

+ Add RNG/random selection overloads such as `luck()`, `rng_overload()`
+ Add 7 new throwable exceptions
+ Add color utilities: `chalk` object and `WebColor()`
+ Add `.terminal` module, to ease TUI development
+ `calendar`: add `parse_time()`
+ Add validators `not_greater_than()`, `not_less_than()`
+ Add the `@future()` decorator: it flags features not yet intended to be public, for instance ones backported as part of a bug fix.

## 0.6.1

- First release on PyPI under the name `suou`.
- **BREAKING**: if you installed `sakuragasaki46-suou<=0.6.0` you need to uninstall and reinstall or things may break.
- Fix `sqlalchemy.asyncio.SQLAlchemy()` to use context vars; `expire_on_commit=` is now configurable at instantiation. Fix some missing re-exports.

## 0.6.0

+ `.sqlalchemy` has been made a subpackage and split; `sqlalchemy_async` (moved to `sqlalchemy.asyncio`) has been deprecated. Update your imports.
+ Add several new utilities to `.sqlalchemy`: `BitSelector`, `secret_column`, `a_relationship`, `SessionWrapper`, and a `wrap=` argument to SQLAlchemy. Also removed dead batteries
+ Add `.waiter` module. For now, non-functional ~
+ Add `ArgConfigSource` to `.configparse`
+ Add Z85 (`z85encode()`, `z85decode()`) encoding support
+ Add more strings to `.legal` module
+ `.signing` module is now covered by tests
+ New decorator `dei_args()`. Now offensive naming is no longer a worry!

## 0.5.3

- Added docstring to `SQLAlchemy()`.
- More type fixes.

## 0.5.2

- Fixed a poorly handled merge conflict that left the `.sqlalchemy` module unusable

## 0.5.1

- Fixed return types for `.sqlalchemy` module

## 0.5.0

+ `sqlalchemy`: add `unbound_fk()`, `bound_fk()`
+ Add `sqlalchemy_async` module with the `SQLAlchemy()` async database binding.
  * Supports being used as an async context manager
  * Automatically handles commit and rollback
+ `sqlalchemy_async` also offers `async_query()`
+ Changed `sqlalchemy.parent_children()` to use `lazy='selectin'` by default
+ Add `timed_cache()`, `alru_cache()`, `TimedDict()`, `none_pass()`, `twocolon_list()`, `quote_css_string()`, `must_be()`
+ Add module `calendar` with `want_*` date type conversion utilities and `age_and_days()`
+ Move obsolete stuff to the `obsolete` package (includes configparse 0.3 as of now)
+ Add `redact` module with `redact_url_password()`
+ Add more exceptions: `NotFoundError()`, `BabelTowerError()`
+ Add `sass` module
+ Add `quart` module with `negotiate()`, `add_rest()`, `add_i18n()`, `WantsContentType`
+ Add `dei` module: it implements a compact and standardized representation for pronouns, inspired by the one in use at PronounDB

## 0.4.1

- Fixed return types for `.sqlalchemy` module.
- `sqlalchemy.parent_children()` now takes a `lazy` parameter. Backported from 0.5.1.

## 0.4.0

+ `pydantic` is now a hard dependency
+ `ConfigProperty` has now been generalized: check out `classtools.ValueProperty`
+ **BREAKING**: Changed the behavior of `makelist()`: **different behavior when used with callables**.
  * When applied as a decorator on a callable, it converts its return type to a list.
  * Pass `wrap=False` to treat callables as simple objects, restoring the 0.3 behavior.
+ New module `lex` to make tokenization more affordable — with functions `symbol_table()` and `lex()`
+ Add `dorks` module and `flask.harden()`. `dorks` contains common endpoints which may be targeted by hackers
+ Add `sqlalchemy.bool_column()`: makes creating flags painless
+ Introduce `rb64encode()` and `rb64decode()` to deal with issues about Base64 and padding
  * `b64encode()` and `b64decode()` pad to the right
  * `rb64encode()` and `rb64decode()` pad to the left, then strip leading `'A'` in output
+ Added `addattr()`, `PrefixIdentifier()`, `mod_floor()`, `mod_ceil()`
+ First version to have unit tests! (Coverage is not yet complete)

## 0.3.8

- Fixed return types for `.sqlalchemy` module.
- `sqlalchemy.parent_children()` now takes a `lazy` parameter. Backported from 0.5.1.

## 0.3.7

- Fixed a bug in `b64decode()` padding handling which made the function inconsistent and non-injective. Now, leading `'A'` is NEVER stripped.

## 0.3.6

- Fixed `ConfigValue` behavior with multiple sources. It used to iterate through all the sources, possibly overwriting; now, iteration stops at the first non-missing value 👀

## 0.3.5

- Fixed cb32 handling. Now leading zeros in SIQ's are stripped, and `.from_cb32()` was implemented
- Fixed cb32 handling. Now leading zeros in SIQ's are stripped, and `.from_cb32()` was implemented.

## 0.3.4
README.md (26 changed lines)

@@ -1,12 +1,8 @@
# SIS Unified Object Underarmor

Good morning, my brother! Welcome to **SUOU** (**S**IS **U**nified **O**bject **U**nderarmor), the Python library which speeds up and makes it pleasant to develop APIs, database schemas and stuff in Python.
Good morning, my brother! Welcome to SUOU (SIS Unified Object Underarmor), a library for managing how objects are stored in a database.

It provides utilities such as:
* SIQ ([specification](https://yusur.moe/protocols/siq.html) - [copy](https://suou.readthedocs.io/en/latest/iding.html))
* signing and generation of access tokens, on top of [ItsDangerous](https://github.com/pallets/itsdangerous) *not tested and not working*
* helpers for use in Flask, [SQLAlchemy](https://suou.readthedocs.io/en/latest/sqlalchemy.html), and other popular frameworks
* i forgor 💀
It provides utilities such as [SIQ](https://yusur.moe/protocols/siq.html), signing and generation of access tokens (on top of [ItsDangerous](https://github.com/pallets/itsdangerous)) and various other helpers, including ones for use in Flask and SQLAlchemy.

**It is not an ORM** nor a replacement for one; it works alongside existing ORMs (currently only SQLAlchemy is supported lol).

@@ -26,22 +22,6 @@ $ pip install sakuragasaki46-suou[sqlalchemy]

Please note that you probably already have those dependencies, if you just use the library.

## Features

Read the [documentation](https://suou.readthedocs.io/).

## Support

Just a heads up: SUOU was made to support Sakuragasaki46 (me)'s own selfish, egoistic needs. Not certainly to provide a service to the public.

As a consequence, 'add this add that' stuff is best-effort.

Expect breaking changes, disruptive renames in bugfix releases, sudden deprecations, years of unmaintainment, or sudden removal of SUOU from GH or pip.

Don't want to depend on my codebase for moral reasons (albeit unrelated)? It's fine. I did not ask you.

**DO NOT ASK TO MAKE SUOU SAFE FOR CHILDREN**. Enjoy having your fingers cut.

## License

Licensed under the [Apache License, Version 2.0](LICENSE), a non-copyleft free and open source license.

@@ -52,5 +32,3 @@ I (sakuragasaki46) may NOT be held accountable for Your use of my code.

> It's pointless to file a lawsuit because you feel damaged, and it's only going to turn against you. What a waste of money you could have spent on a vacation or charity, or invested in stocks.

Happy hacking.

@@ -1 +0,0 @@
moved to [suou](https://pypi.org/project/suou)

@@ -1,20 +0,0 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
docs/api.rst (43 changed lines)

@@ -1,43 +0,0 @@
API
===

.. autosummary::
   :toctree: generated
   :recursive:

   suou.sqlalchemy
   suou.asgi
   suou.bits
   suou.calendar
   suou.classtools
   suou.codecs
   suou.collections
   suou.color
   suou.configparse
   suou.dei
   suou.dorks
   suou.exceptions
   suou.flask_restx
   suou.flask_sqlalchemy
   suou.flask
   suou.functools
   suou.http
   suou.i18n
   suou.iding
   suou.itertools
   suou.legal
   suou.lex
   suou.luck
   suou.markdown
   suou.migrate
   suou.peewee
   suou.quart
   suou.redact
   suou.sass
   suou.signing
   suou.snowflake
   suou.strtools
   suou.terminal
   suou.validators
   suou.waiter

@@ -1,19 +0,0 @@
Color
=====

.. currentmodule:: suou.color

...

Web colors
----------

.. autoclass:: RGBColor

.. autoclass:: WebColor

.. autoclass:: XYZColor
docs/conf.py (53 changed lines)

@@ -1,53 +0,0 @@
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

import sys
from pathlib import Path

sys.path.insert(0, str(Path("..", "src").resolve()))

project = 'suou'
copyright = '2025 Sakuragasaki46'
author = 'Sakuragasaki46'

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = [
    "sphinx.ext.autodoc",
    'sphinx.ext.autosummary',
    'sphinx_rtd_theme'
]

templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

autodoc_mock_imports = [
    "toml",
    "starlette",
    "itsdangerous",
    #"pydantic",
    "quart_schema"
]

# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = 'sphinx_rtd_theme'
html_theme_path = ["_themes", ]
html_static_path = ['_static']

def polish_module_docstring(app, what, name, obj, options, lines):
    if what == "module" and 'members' in options:
        try:
            del lines[lines.index('---'):]
        except Exception:
            pass

def setup(app):
    app.connect("autodoc-process-docstring", polish_module_docstring)
@@ -1,6 +0,0 @@
suou.asgi
=========

.. automodule:: suou.asgi

@@ -1,17 +0,0 @@
suou.bits
=========

.. automodule:: suou.bits

.. rubric:: Functions

.. autosummary::

   count_ones
   join_bits
   mask_shift
   mod_ceil
   mod_floor
   split_bits

@@ -1,16 +0,0 @@
suou.calendar
=============

.. automodule:: suou.calendar

.. rubric:: Functions

.. autosummary::

   age_and_days
   parse_time
   want_datetime
   want_isodate
   want_timestamp

@@ -1,16 +0,0 @@
suou.classtools
===============

.. automodule:: suou.classtools

.. rubric:: Classes

.. autosummary::

   Incomplete
   MissingType
   ValueProperty
   ValueSource
   Wanted

@@ -1,37 +0,0 @@
suou.codecs
===========

.. automodule:: suou.codecs

.. rubric:: Functions

.. autosummary::

   b2048decode
   b2048encode
   b32ldecode
   b32lencode
   b64decode
   b64encode
   cb32decode
   cb32encode
   cb32lencode
   jsonencode
   quote_css_string
   rb64decode
   rb64encode
   ssv_list
   twocolon_list
   want_bytes
   want_str
   want_urlsafe
   want_urlsafe_bytes
   z85encode

.. rubric:: Classes

.. autosummary::

   StringCase

@@ -1,12 +0,0 @@
suou.collections
================

.. automodule:: suou.collections

.. rubric:: Classes

.. autosummary::

   TimedDict

@@ -1,15 +0,0 @@
suou.color
==========

.. automodule:: suou.color

.. rubric:: Classes

.. autosummary::

   Chalk
   RGBColor
   SRGBColor
   WebColor

@@ -1,18 +0,0 @@
suou.configparse
================

.. automodule:: suou.configparse

.. rubric:: Classes

.. autosummary::

   ArgConfigSource
   ConfigOptions
   ConfigParserConfigSource
   ConfigSource
   ConfigValue
   DictConfigSource
   EnvConfigSource

@@ -1,24 +0,0 @@
suou.dei
========

.. automodule:: suou.dei

.. rubric:: Module Attributes

.. autosummary::

   BRICKS

.. rubric:: Functions

.. autosummary::

   dei_args

.. rubric:: Classes

.. autosummary::

   Pronoun

@@ -1,6 +0,0 @@
suou.dorks
==========

.. automodule:: suou.dorks

@@ -1,24 +0,0 @@
suou.exceptions
===============

.. automodule:: suou.exceptions

.. rubric:: Exceptions

.. autosummary::

   BabelTowerError
   BadLuckError
   BrokenStringsError
   Fahrenheit451Error
   FuckAroundFindOutError
   InconsistencyError
   LexError
   MissingConfigError
   MissingConfigWarning
   NotFoundError
   PoliticalError
   PoliticalWarning
   TerminalRequiredError

@@ -1,16 +0,0 @@
suou.flask
==========

.. automodule:: suou.flask

.. rubric:: Functions

.. autosummary::

   add_context_from_config
   add_i18n
   get_flask_conf
   harden
   negotiate

@@ -1,18 +0,0 @@
suou.flask\_restx
=================

.. automodule:: suou.flask_restx

.. rubric:: Functions

.. autosummary::

   output_json

.. rubric:: Classes

.. autosummary::

   Api

@@ -1,6 +0,0 @@
suou.flask\_sqlalchemy
======================

.. automodule:: suou.flask_sqlalchemy

@@ -1,18 +0,0 @@
suou.functools
==============

.. automodule:: suou.functools

.. rubric:: Functions

.. autosummary::

   alru_cache
   deprecated_alias
   flat_args
   future
   none_pass
   not_implemented
   timed_cache

@@ -1,12 +0,0 @@
suou.http
=========

.. automodule:: suou.http

.. rubric:: Classes

.. autosummary::

   WantsContentType

@@ -1,16 +0,0 @@
suou.i18n
=========

.. automodule:: suou.i18n

.. rubric:: Classes

.. autosummary::

   I18n
   I18nLang
   IdentityLang
   JsonI18n
   TomlI18n

@@ -1,22 +0,0 @@
suou.iding
==========

.. automodule:: suou.iding

.. rubric:: Functions

.. autosummary::

   make_domain_hash

.. rubric:: Classes

.. autosummary::

   Siq
   SiqCache
   SiqFormatType
   SiqGen
   SiqType

@@ -1,23 +0,0 @@
suou.itertools
==============

.. automodule:: suou.itertools

.. rubric:: Functions

.. autosummary::

   addattr
   additem
   kwargs_prefix
   ltuple
   makelist
   rtuple

.. rubric:: Classes

.. autosummary::

   hashed_list

@@ -1,12 +0,0 @@
suou.legal
==========

.. automodule:: suou.legal

.. rubric:: Classes

.. autosummary::

   Lawyer

@@ -1,6 +0,0 @@
suou.lex
========

.. currentmodule:: suou

.. autofunction:: lex

@@ -1,19 +0,0 @@
suou.luck
=========

.. automodule:: suou.luck

.. rubric:: Functions

.. autosummary::

   lucky
   rng_overload

.. rubric:: Classes

.. autosummary::

   RngCallable

@@ -1,16 +0,0 @@
suou.markdown
=============

.. automodule:: suou.markdown

.. rubric:: Classes

.. autosummary::

   MentionPattern
   PingExtension
   SpoilerExtension
   StrikethroughExtension
   StrikethroughPostprocessor

@@ -1,14 +0,0 @@
suou.migrate
============

.. automodule:: suou.migrate

.. rubric:: Classes

.. autosummary::

   SiqMigrator
   SnowflakeSiqMigrator
   UlidSiqMigrator

@@ -1,23 +0,0 @@
suou.peewee
===========

.. automodule:: suou.peewee

.. rubric:: Functions

.. autosummary::

   connect_reconnect

.. rubric:: Classes

.. autosummary::

   ConnectToDatabase
   PeeweeConnectionState
   ReconnectMysqlDatabase
   RegexCharField
   SiqField
   SnowflakeField

@@ -1,14 +0,0 @@
suou.quart
==========

.. automodule:: suou.quart

.. rubric:: Functions

.. autosummary::

   add_i18n
   add_rest
   negotiate

@@ -1,12 +0,0 @@
suou.redact
===========

.. automodule:: suou.redact

.. rubric:: Functions

.. autosummary::

   redact_url_password

@@ -1,6 +0,0 @@
suou
====

.. automodule:: suou

@@ -1,12 +0,0 @@
suou.sass
=========

.. automodule:: suou.sass

.. rubric:: Classes

.. autosummary::

   SassAsyncMiddleware

@@ -1,13 +0,0 @@
suou.signing
============

.. automodule:: suou.signing

.. rubric:: Classes

.. autosummary::

   HasSigner
   UserSigner

@@ -1,13 +0,0 @@
suou.snowflake
==============

.. automodule:: suou.snowflake

.. rubric:: Classes

.. autosummary::

   Snowflake
   SnowflakeGen

@@ -1,20 +0,0 @@
suou.sqlalchemy.asyncio
=======================

.. automodule:: suou.sqlalchemy.asyncio

.. rubric:: Functions

.. autosummary::

   async_query

.. rubric:: Classes

.. autosummary::

   AsyncSelectPagination
   SQLAlchemy
   SessionWrapper

@@ -1,33 +0,0 @@
suou.sqlalchemy.orm
===================

.. automodule:: suou.sqlalchemy.orm

.. rubric:: Functions

.. autosummary::

   a_relationship
   age_pair
   author_pair
   bool_column
   bound_fk
   declarative_base
   entity_base
   id_column
   match_column
   match_constraint
   parent_children
   secret_column
   snowflake_column
   unbound_fk
   username_column
   want_column

.. rubric:: Classes

.. autosummary::

   BitSelector

@@ -1,34 +0,0 @@
suou.sqlalchemy
===============

.. automodule:: suou.sqlalchemy

.. rubric:: Module Attributes

.. autosummary::

   IdType

.. rubric:: Functions

.. autosummary::

   create_session
   require_auth_base
   token_signer

.. rubric:: Classes

.. autosummary::

   AuthSrc

.. rubric:: Modules

.. autosummary::
   :toctree:
   :recursive:

   asyncio
   orm

@@ -1,13 +0,0 @@
suou.strtools
=============

.. automodule:: suou.strtools

.. rubric:: Classes

.. autosummary::

   PrefixIdentifier
   SpitText

@@ -1,12 +0,0 @@
suou.terminal
=============

.. automodule:: suou.terminal

.. rubric:: Functions

.. autosummary::

   terminal_required

@@ -1,16 +0,0 @@
suou.validators
===============

.. automodule:: suou.validators

.. rubric:: Functions

.. autosummary::

   matches
   must_be
   not_greater_than
   not_less_than
   yesno

@@ -1,14 +0,0 @@
suou.waiter
===========

.. automodule:: suou.waiter

.. rubric:: Functions

.. autosummary::

   Waiter
   ko
   ok
docs/iding.rst (197 changed lines)

@@ -1,197 +0,0 @@
IDing
=====

.. currentmodule:: suou.iding

...

SIQ
---

The main point of the SUOU library is to provide an implementation for the methods of SIS, a protocol for information exchange that is still being defined,
and of which SUOU is the reference implementation.

The key element is the ID format called SIQ, a 112-bit identifier format.

Here follows an extract from the `specification`_:

.. _specification: <https://yusur.moe/protocols/siq.html>

Why SIQ?
********

.. highlights::
   I needed unique, compact, decentralized, reproducible and sortable identifiers for my applications.

   Something I could reliably use as a database key, while also being fit for my purposes, in the context of a larger project, a federated protocol.

Why not ...
***********

.. highlights::
   * **Serial numbers**? They are relative. If they needed to be absolute, they would have to be issued by a single central authority for everyone else. Unacceptable for a decentralized protocol.
   * **Username-domain identifiers**? Despite being in use in other decentralized protocols (such as ActivityPub and Matrix), they are immutable and bound to a single domain. This means the system sees different domains or usernames as different users. Users can't change their username after registration, forcing them to carry an unpleasant or cringe handle for the rest of their life.
   * **UUID**'s? UUIDs are unreliable. Most services use UUIDv4's, which are just opaque sequences of random bytes, and definitely not optimal as database keys. Other versions exist (such as the timestamp-based [UUIDv7](https://uuidv7.org)), however they still miss something needed for cross-domain uniqueness. In any case, UUIDs need to waste some bits to specify their "protocol".
   * **Snowflake**s? Snowflakes would be a good choice, and are the inspiration for SIQ themselves. However, 64 bits are not enough for our use case, and Snowflake is *already making the necessary sacrifices* to ensure everything fits into 64 bits (i.e. the epoch got significantly moved forward).
   * **Content hashes**? They are based on content, therefore they require content to be immutable and undeletable. Also: collisions.
   * **PLC**'s (i.e. the ones in use at BlueSky)? [The implementation is cryptic](https://github.com/did-method-plc/did-method-plc). Moreover, it requires a central authority, and BlueSky is, as of now, holding the role of the sole authority. The resulting identifier as well is apparently random, therefore unorderable.
   * **ULID**'s? They are just UUIDv4's with a timestamp. Sortable? Yes. Predictable? No, the random bits rely on the assumption of being generated on a single host — i.e. centralization. Think of them as yet another attempt at UUIDv7's.

Anatomy of a SIQ
****************

SIQ's are **112 bit** binary strings. Why 112? Why not 128? Idk, felt like it. Maybe to save space. Maybe because I could fit it into UUID some day — UUID already reserves some bits for the protocol.

Those 112 bits split up into:

* 56 bits of **timestamp**;
* 8 bits of process ("**shard**") information;
* 32 bits of **domain** hash;
* 16 bits of **serial** and **qualifier**.

Here is a graph of a typical SIQ layout:

```
 0: tttttttt tttttttt tttttttt tttttttt tttttttt
40: uuuuuuuu uuuuuuuu ssssssss dddddddd dddddddd
80: dddddddd dddddddd nnnnnnnn nnqqqqqq

where:
t : timestamp -- seconds
u : timestamp -- fraction seconds
s : shard
d : domain hash
n : progressive
q : qualifier (variable width, in fact)
```
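As a quick editorial illustration of the diagram above (not part of the original document, and not the `Siq` class's own API), here is a minimal sketch that splits a 112-bit SIQ integer into the documented fields; the 10/6 split of the tail follows the diagram, even though the qualifier width varies in practice:

```python
# Editorial sketch of the documented 112-bit SIQ layout.
def unpack_siq(siq: int) -> dict:
    assert 0 <= siq < 1 << 112, "a SIQ is a 112-bit value"
    return {
        "timestamp": siq >> 56,                 # 40 bits of seconds + 16 bits of fraction
        "shard":     (siq >> 48) & 0xFF,        # 8-bit process ("shard") field
        "domain":    (siq >> 16) & 0xFFFFFFFF,  # 32-bit domain hash
        "serial":    (siq >> 6) & 0x3FF,        # 10 progressive bits
        "qualifier": siq & 0x3F,                # 6 qualifier bits (width varies in practice)
    }
```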

Timestamp
*********

SIQ uses 56 bits for storing the timestamp:

- **40 bits** for **seconds**;
- **16 bits** for **fraction seconds**.

There is no need to explain [why I need no less than 40 bits for seconds](https://en.wikipedia.org/wiki/Year_2038_problem).

Most standards — including Snowflake and ULID — store the timestamp in *milliseconds*. It means the system needs to divide by 1000 to retrieve the value in seconds.

But 1000 is almost 1024, right? So the last ten bits can safely be ignored and we easily obtain a UNIX timestamp by doing a right shi- wait.

It's more comfortable to assume that 1024 is nearly 1000. *Melius abundare quam deficere*. And the mapping stays injective.

But rounding? Truncation? Here comes the purpose of the 6 additional trailing bits: precision control. Bits obtained by dividing milliseconds are different from bits obtained by rounding microseconds.

Yes, most systems can't go beyond milliseconds for accuracy — standard Java is like that. But detecting platform accuracy is beyond my scope.
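A quick editorial back-of-the-envelope check of the precision just discussed: 16 fractional bits give steps of 2^-16 of a second, which is where the "around 15 microseconds" figure later in this extract comes from.

```python
# Resolution implied by the 16-bit fractional part of the timestamp.
step = 2 ** -16                   # 1.52587890625e-05 s, i.e. about 15.3 microseconds
steps_per_ms = 2 ** 16 / 1000     # ~65.5 fraction steps per millisecond
```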

There are other factors to ensure uniqueness: *domain* and *shard* bits.

Domain, shard
*************

Temporal uniqueness is ensured by the timestamp. However, in a distributed, federated system there is the chance for the same ID to get generated twice by two different subjects.

Therefore, *spatial* uniqueness must be enforced in some way.

Since SIQ's are going to be used mostly in web applications, a way to differentiate *spatially* different applications is via the **domain name**.

I decided to reserve **32 bits** for the domain hash.

The algorithm of choice is **SHA-256** for its well-known diffusion and collision resistance. However, 256 bits are too much to fit into a SIQ! So, the last 4 bytes are taken.

*...*

Development and testing environments may safely set all the domain bits to 0.
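A hedged editorial sketch of the domain-hash rule described above (the real `make_domain_hash()` in `suou.iding` may normalize its input differently): SHA-256 of the domain name, keeping only the last 4 bytes.

```python
import hashlib

def domain_hash(domain: str) -> int:
    """Last 32 bits of SHA-256(domain), per the rule in the extract above."""
    digest = hashlib.sha256(domain.encode()).digest()
    return int.from_bytes(digest[-4:], "big")

domain_hash("example.org")   # fits in the 32 domain bits of a SIQ
```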

Qualifiers
**********

The last 16 bits are special, in a way that makes those identifiers unique, and you can tell what is what just by looking at them.

Inspired by programming language implementations, such as OCaml and early JavaScript, a distinguishing bit affix differentiates among types of heterogeneous entities:

* terminal entities (leaves) end in ``1``. This includes content blobs, array elements, and relationships;
* non-leaves end in ``0``.

The full assignment scheme (managed by me) looks like this:

===========  ===========================================
Suffix       Usage
===========  ===========================================
``x00000``   user account
``x10000``   application (e.g. API, client, bot, form)
``x01000``   event, task
``x11000``   product, subscription
``x00100``   user group, membership, role
``x10100``   collection, feed
``x01100``   invite
``x11100``   *unassigned*
``x00010``   tag, category
``x10010``   *unassigned*
``x01010``   channel (guild, live chat, forum, wiki~)
``x11010``   *unassigned*
``xx0110``   thread, page
``xx1110``   message, post, revision
``xxx001``   3+ fk relationship
``xxx101``   many-to-many, hash array element
``xxx011``   array element (one to many)
``xxx111``   content
===========  ===========================================

The leftover bits are used as progressive serials, incremented as generation continues, and usually reset when the timestamp is incremented.

Like with snowflakes and ULID's, if you happen to run out of serials, you need to wait till the timestamp changes. Usually around 15 microseconds.

Storage
*******

It is advised to store SIQ's in databases as *16 byte binary strings*.

- In MySQL/MariaDB, that is ``VARBINARY(16)``.

The two extra bytes are there to ease alignment, and a possible expansion of the timestamp range — even though that would not be an issue until some years after 10,000 CE.

It is possible to fit them into UUID's (specifically, UUIDv8's — custom ones), taking advantage of databases and libraries implementing a UUID type — e.g. PostgreSQL.

Unfortunately, nobody wants to deal with storing arbitrarily long integers — lots of issues pop up by going beyond 64. 128 bit integers are not natively supported in most places. Let alone 112 bit ones.
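A minimal editorial sketch of the 16-byte storage recommendation above (these helper names are illustrative, not suou's): big-endian packing leaves the two spare high bytes at zero and keeps values sortable as raw bytes.

```python
def siq_to_bytes(siq: int) -> bytes:
    return siq.to_bytes(16, "big")      # 112 bits plus 2 leading zero bytes

def siq_from_bytes(raw: bytes) -> int:
    return int.from_bytes(raw, "big")
```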

(end of extract)

Implementation
**************

.. autoclass:: Siq

.. autoclass:: SiqGen

.. automethod:: SiqGen.__init__
.. automethod:: SiqGen.generate

Snowflake
---------

SUOU also implements \[the Discord flavor of\] Snowflake ID's.

This flavor of Snowflake requires an epoch date, and consists of:

* 42 bits of timestamp, with millisecond precision;
* 10 bits for, respectively, worker ID (5 bits) and shard ID (5 bits);
* 12 bits incremented progressively.

.. autoclass:: suou.snowflake.Snowflake

.. autoclass:: suou.snowflake.SnowflakeGen
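For illustration, an editorial sketch of the Discord-flavored layout just described (field names are illustrative and the epoch is whatever the generator was configured with; `suou.snowflake.Snowflake` may expose different attributes):

```python
def split_snowflake(value: int, epoch_ms: int = 0) -> tuple[int, int, int, int]:
    timestamp_ms = (value >> 22) + epoch_ms   # 42-bit millisecond timestamp
    worker_id = (value >> 17) & 0x1F          # 5 bits
    shard_id = (value >> 12) & 0x1F           # 5 bits
    increment = value & 0xFFF                 # 12 bits, incremented progressively
    return timestamp_ms, worker_id, shard_id, increment
```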

Other ID formats
----------------

Other ID formats (such as UUID's, ULID's) are implemented by other libraries.

In particular, Python itself has support for UUID in the Standard Library.
@@ -1,20 +0,0 @@
.. suou documentation master file, created by
   sphinx-quickstart on Fri Oct 10 19:24:23 2025.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

SUOU
==================

SUOU (acronym for **SIS Unified Object Underarmour**) is a casual Python library providing utilities to
ease programmer's QoL and write shorter and cleaner code that works.

.. toctree::
   :maxdepth: 2

   sqlalchemy
   iding
   validators
   color
   api
@@ -1,35 +0,0 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

if "%1" == "" goto help

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
@@ -1,48 +0,0 @@
sqlalchemy helpers
==================

.. currentmodule:: suou.sqlalchemy

SUOU provides several helpers to make the sqlalchemy learning curve less steep.

In fact, there are pre-made column presets for specific purposes.

Columns
-------

.. autofunction:: id_column

.. warning::
   ``id_column()`` expects SIQ's!

.. autofunction:: snowflake_column

.. autofunction:: match_column

.. autofunction:: secret_column

.. autofunction:: bool_column

.. autofunction:: username_column

.. autofunction:: unbound_fk
.. autofunction:: bound_fk

Column pairs
------------

.. autofunction:: age_pair
.. autofunction:: author_pair
.. autofunction:: parent_children

Misc
----

.. autoclass:: BitSelector

.. autofunction:: match_constraint
.. autofunction:: a_relationship
.. autofunction:: declarative_base
.. autofunction:: want_column
@@ -1,15 +0,0 @@
validators
==================

.. currentmodule:: suou.validators

Validators for use in frameworks such as Pydantic or Marshmallow.

.. autofunction:: matches

.. autofunction:: not_greater_than

.. autofunction:: not_less_than

.. autofunction:: yesno
@@ -1,6 +1,5 @@
[project]
name = "suou"
description = "casual utility library for coding QoL"
name = "sakuragasaki46_suou"
authors = [
    { name = "Sakuragasaki46" }
]

@@ -11,10 +10,7 @@ readme = "README.md"

dependencies = [
    "itsdangerous",
    "toml",
    "pydantic",
    "setuptools>=78.0.0",
    "uvloop; os_name=='posix'"
    "toml"
]
# - further devdependencies below - #

@@ -27,60 +23,30 @@ classifiers = [
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14"
    "Programming Language :: Python :: 3.13"
]

[project.urls]
Repository = "https://nekode.yusur.moe/yusur/suou"
Documentation = "https://suou.readthedocs.io"
Repository = "https://github.com/sakuragasaki46/suou"

[project.optional-dependencies]
# the below are all dev dependencies (and probably already installed)
sqlalchemy = [
    "SQLAlchemy[asyncio]>=2.0.0",
    "flask-sqlalchemy"
    "SQLAlchemy>=2.0.0"
]
flask = [
    "Flask>=2.0.0",
    "Flask-RestX"
]
flask_sqlalchemy = [
    "suou[sqlalchemy]",
    "suou[flask]"
    "Flask-SqlAlchemy"
]
peewee = [
    ## HEADS UP! peewee has setup.py, may slow down installation
    "peewee>=3.0.0"
    "peewee>=3.0.0, <4.0"
]
markdown = [
    "markdown>=3.0.0"
]
quart = [
    "Quart",
    "Quart-Schema",
    "starlette>=0.47.2"
]
sass = [
    ## HEADS UP!! libsass carries a C extension + uses setup.py
    "libsass"
]

full = [
    "suou[sqlalchemy]",
    "suou[flask]",
    "suou[quart]",
    "suou[peewee]",
    "suou[markdown]",
    "suou[sass]"
]

docs = [
    "sphinx>=2.1",
    "myst_parser",
    "sphinx_rtd_theme"
]

[tool.setuptools.dynamic]
version = { attr = "suou.__version__" }
@@ -1,14 +0,0 @@
# This file is only used for Sphinx.
# End users should use pyproject.toml instead

itsdangerous==2.2.0
libsass==0.23.0
peewee==3.18.1
pydantic==2.12.0
quart_schema==0.22.0
setuptools==80.9.0
starlette==0.48.0
SQLAlchemy==2.0.40
toml==0.10.2
sphinx_rtd_theme==3.0.2
@@ -18,45 +18,22 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

from .iding import Siq, SiqCache, SiqType, SiqGen
from .codecs import (StringCase, cb32encode, cb32decode, b32lencode, b32ldecode, b64encode, b64decode, b2048encode, b2048decode,
    jsonencode, twocolon_list, want_bytes, want_str, ssv_list, want_urlsafe, want_urlsafe_bytes)
from .bits import count_ones, mask_shift, split_bits, join_bits, mod_ceil, mod_floor
from .calendar import want_datetime, want_isodate, want_timestamp, age_and_days
    jsonencode, want_bytes, want_str, ssv_list)
from .bits import count_ones, mask_shift, split_bits, join_bits
from .configparse import MissingConfigError, MissingConfigWarning, ConfigOptions, ConfigParserConfigSource, ConfigSource, DictConfigSource, ConfigValue, EnvConfigSource
from .collections import TimedDict
from .dei import dei_args
from .functools import deprecated, not_implemented, timed_cache, none_pass, alru_cache, future
from .functools import deprecated, not_implemented
from .classtools import Wanted, Incomplete
from .itertools import makelist, kwargs_prefix, ltuple, rtuple, additem, addattr
from .itertools import makelist, kwargs_prefix, ltuple, rtuple, additem
from .i18n import I18n, JsonI18n, TomlI18n
from .signing import UserSigner
from .snowflake import Snowflake, SnowflakeGen
from .lex import symbol_table, lex, ilex
from .strtools import PrefixIdentifier
from .validators import matches, not_less_than, not_greater_than, yesno
from .redact import redact_url_password
from .http import WantsContentType
from .color import OKLabColor, chalk, WebColor, RGBColor, SRGBColor, XYZColor, OKLabColor
from .mat import Matrix

__version__ = "0.12.0a5"
__version__ = "0.3.5"

__all__ = (
    'ConfigOptions', 'ConfigParserConfigSource', 'ConfigSource', 'ConfigValue',
    'DictConfigSource', 'EnvConfigSource', 'I18n', 'Incomplete', 'JsonI18n',
    'Matrix', 'MissingConfigError', 'MissingConfigWarning', 'OKLabColor',
    'PrefixIdentifier', 'RGBColor', 'SRGBColor',
    'Siq', 'SiqCache', 'SiqGen', 'SiqType', 'Snowflake', 'SnowflakeGen',
    'StringCase', 'TimedDict', 'TomlI18n', 'UserSigner', 'Wanted', 'WantsContentType',
    'WebColor', 'XYZColor',
    'addattr', 'additem', 'age_and_days', 'alru_cache', 'b2048decode', 'b2048encode',
    'b32ldecode', 'b32lencode', 'b64encode', 'b64decode', 'cb32encode',
    'cb32decode', 'chalk', 'count_ones', 'dei_args', 'deprecated',
    'future', 'ilex', 'join_bits',
    'jsonencode', 'kwargs_prefix', 'lex', 'ltuple', 'makelist', 'mask_shift',
    'matches', 'mod_ceil', 'mod_floor', 'must_be', 'none_pass', 'not_implemented',
    'not_less_than', 'not_greater_than',
    'redact_url_password', 'rtuple', 'split_bits', 'ssv_list', 'symbol_table',
    'timed_cache', 'twocolon_list', 'want_bytes', 'want_datetime', 'want_isodate',
    'want_str', 'want_timestamp', 'want_urlsafe', 'want_urlsafe_bytes', 'yesno',
    'z85encode', 'z85decode'
    'Siq', 'SiqCache', 'SiqType', 'SiqGen', 'StringCase',
    'MissingConfigError', 'MissingConfigWarning', 'ConfigOptions', 'ConfigParserConfigSource', 'ConfigSource', 'ConfigValue', 'EnvConfigSource', 'DictConfigSource',
    'deprecated', 'not_implemented', 'Wanted', 'Incomplete', 'jsonencode', 'ltuple', 'rtuple',
    'makelist', 'kwargs_prefix', 'I18n', 'JsonI18n', 'TomlI18n', 'cb32encode', 'cb32decode', 'count_ones', 'mask_shift',
    'want_bytes', 'want_str', 'version', 'b2048encode', 'split_bits', 'join_bits', 'b2048decode',
    'Snowflake', 'SnowflakeGen', 'ssv_list', 'additem', 'b32lencode', 'b32ldecode', 'b64encode', 'b64decode'
)
@@ -1,37 +0,0 @@
"""
ASGI stuff

---

Copyright (c) 2025 Sakuragasaki46.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
See LICENSE for the specific language governing permissions and
limitations under the License.

This software is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""

from typing import Any, Awaitable, Callable, MutableMapping, ParamSpec, Protocol

## TYPES ##

# all the following is copied from Starlette
# available in starlette.types as of starlette==0.47.2
P = ParamSpec("P")

ASGIScope = MutableMapping[str, Any]
ASGIMessage = MutableMapping[str, Any]

ASGIReceive = Callable[[], Awaitable[ASGIMessage]]
ASGISend = Callable[[ASGIMessage], Awaitable[None]]
ASGIApp = Callable[[ASGIScope, ASGIReceive, ASGISend], Awaitable[None]]

class _MiddlewareFactory(Protocol[P]):
    def __call__(self, app: ASGIApp, /, *args: P.args, **kwargs: P.kwargs) -> ASGIApp: ...  # pragma: no cover

## end TYPES ##
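For reference, a tiny editorial example of something that satisfies the `_MiddlewareFactory` protocol in the hunk above; the function name is illustrative and not from suou:

```python
def passthrough_middleware(app: ASGIApp) -> ASGIApp:
    """A no-op ASGI middleware built from the type aliases above."""
    async def wrapped(scope: ASGIScope, receive: ASGIReceive, send: ASGISend) -> None:
        await app(scope, receive, send)   # forward everything unchanged
    return wrapped
```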
@@ -1,5 +1,5 @@
'''
Utilities for working with bits & handy arithmetics
Utilities for working with bits

---

@@ -93,19 +93,5 @@ def join_bits(l: list[int], nbits: int) -> bytes:
    return ou

## arithmetics because yes

def mod_floor(x: int, y: int) -> int:
    """
    Greatest integer smaller than x and divisible by y
    """
    return x - x % y

def mod_ceil(x: int, y: int) -> int:
    """
    Smallest integer greater than x and divisible by y
    """
    return x + (y - x % y) % y

__all__ = ('count_ones', 'mask_shift', 'split_bits', 'join_bits', 'mod_floor', 'mod_ceil')
__all__ = ('count_ones', 'mask_shift', 'split_bits', 'join_bits')
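A quick editorial illustration of what the arithmetic helpers in the hunk above compute:

```python
mod_floor(17, 5)   # -> 15, the largest multiple of 5 that is <= 17
mod_ceil(17, 5)    # -> 20, the smallest multiple of 5 that is >= 17
mod_ceil(20, 5)    # -> 20, values already divisible by y are unchanged
```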
@@ -1,91 +0,0 @@
"""
Calendar utilities (mainly Gregorian oof)

---

Copyright (c) 2025 Sakuragasaki46.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
See LICENSE for the specific language governing permissions and
limitations under the License.

This software is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""

import datetime

from suou.luck import lucky
from suou.validators import not_greater_than

def want_isodate(d: datetime.datetime | str | float | int, *, tz = None) -> str:
    """
    Convert a date into ISO timestamp (e.g. 2020-01-01T02:03:04)
    """
    if isinstance(d, (int, float)):
        d = datetime.datetime.fromtimestamp(d, tz=tz)
    if isinstance(d, str):
        return d
    return d.isoformat()

def want_datetime(d: datetime.datetime | str | float | int, *, tz = None) -> datetime.datetime:
    """
    Convert a date into Python datetime.datetime (e.g. datetime.datetime(2020, 1, 1, 2, 3, 4)).

    If a string is passed, ISO format is assumed
    """
    if isinstance(d, str):
        d = datetime.datetime.fromisoformat(d)
    elif isinstance(d, (int, float)):
        d = datetime.datetime.fromtimestamp(d, tz=tz)
    return d

def want_timestamp(d: datetime.datetime | str | float | int, *, tz = None) -> float:
    """
    Convert a date into UNIX timestamp (e.g. 1577840584.0). Returned as a float; decimals are milliseconds.
    """
    if isinstance(d, str):
        d = want_datetime(d, tz=tz)
    if isinstance(d, (int, float)):
        return d
    return d.timestamp()

def age_and_days(date: datetime.datetime, now: datetime.datetime | None = None) -> tuple[int, int]:
    """
    Compute age / duration of a timespan in years and days.
    """
    if now is None:
        now = datetime.date.today()
    y = now.year - date.year - ((now.month, now.day) < (date.month, date.day))
    d = (now - datetime.date(date.year + y, date.month, date.day)).days
    return y, d

@lucky([not_greater_than(259200)])
def parse_time(timestr: str, /) -> int:
    """
    Parse a number-suffix (e.g. 3s, 15m) or colon (1:30) time expression.

    Returns seconds as an integer.
    """
    if timestr.isdigit():
        return int(timestr)
    elif ':' in timestr:
        timeparts = timestr.split(':')
        if not timeparts[0].isdigit() and not all(x.isdigit() and len(x) == 2 for x in timeparts[1:]):
            raise ValueError('invalid time format')
        return sum(int(x) * 60 ** (len(timeparts) - 1 - i) for i, x in enumerate(timeparts))
    elif timestr.endswith('s') and timestr[:-1].isdigit():
        return int(timestr[:-1])
    elif timestr.endswith('m') and timestr[:-1].isdigit():
        return int(timestr[:-1]) * 60
    elif timestr.endswith('h') and timestr[:-1].isdigit():
        return int(float(timestr[:-1]) * 3600)
    else:
        raise ValueError('invalid time format')

__all__ = ('want_datetime', 'want_timestamp', 'want_isodate', 'age_and_days', 'parse_time')
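Editorial usage notes for the calendar helpers removed above, based purely on the code as shown (the `@lucky(...)` decorator may layer extra behavior on top of `parse_time()`):

```python
import datetime

parse_time("90")       # -> 90 seconds
parse_time("15m")      # -> 900
parse_time("2h")       # -> 7200
parse_time("1:30")     # -> 90   (minutes:seconds)
parse_time("1:02:03")  # -> 3723 (hours:minutes:seconds)

# age_and_days() returns whole years plus leftover days:
age_and_days(datetime.date(2000, 1, 15), now=datetime.date(2020, 3, 1))  # -> (20, 46)
```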
|
@ -14,27 +14,10 @@ This software is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""

from __future__ import annotations

from abc import abstractmethod
from types import EllipsisType
from typing import Any, Callable, Generic, Iterable, Mapping, TypeVar
import logging
from typing import Any, Callable, Generic, Iterable, TypeVar

_T = TypeVar('_T')

logger = logging.getLogger(__name__)

class MissingType(object):
    __slots__ = ()
    def __bool__(self):
        return False

MISSING = MissingType()

def _not_missing(v) -> bool:
    return v and v is not MISSING

class Wanted(Generic[_T]):
    """
    Placeholder for parameters wanted by Incomplete().

@@ -49,10 +32,10 @@ class Wanted(Generic[_T]):
    Owner class will call .__set_name__() on the parent Incomplete instance;
    the __set_name__ parameters (owner class and name) will be passed down here.
    """
    _target: Callable | str | None | EllipsisType
    def __init__(self, getter: Callable | str | None | EllipsisType):
    _target: Callable | str | None | Ellipsis
    def __init__(self, getter: Callable | str | None | Ellipsis):
        self._target = getter
    def __call__(self, owner: type, name: str | None = None) -> _T | str | None:
    def __call__(self, owner: type, name: str | None = None) -> _T:
        if self._target is None or self._target is Ellipsis:
            return name
        elif isinstance(self._target, str):

@@ -73,10 +56,12 @@ class Incomplete(Generic[_T]):
    Missing arguments must be passed in the appropriate positions
    (positional or keyword) as a Wanted() object.
    """
    _obj: Callable[..., _T]
    # XXX disabled for https://stackoverflow.com/questions/45864273/slots-conflicts-with-a-class-variable-in-a-generic-class
    #__slots__ = ('_obj', '_args', '_kwargs')
    _obj = Callable[Any, _T]
    _args: Iterable
    _kwargs: dict
    def __init__(self, obj: Callable[..., _T] | Wanted, *args, **kwargs):
    def __init__(self, obj: Callable[Any, _T] | Wanted, *args, **kwargs):
        if isinstance(obj, Wanted):
            self._obj = lambda x: x
            self._args = (obj, )

@@ -113,87 +98,6 @@ class Incomplete(Generic[_T]):
            clsdict[k] = v.instance()
        return clsdict

## Base classes for declarative argument / option parsers below

class ValueSource(Mapping):
    """
    Abstract value source.
    """
    pass

class ValueProperty(Generic[_T]):
    _name: str | None
    _srcs: dict[str, str]
    _val: Any | MissingType
    _default: Any | None
    _cast: Callable | None
    _required: bool
    _pub_name: str | bool = False
    _not_found = LookupError

    def __init__(self, /, src: str | None = None, *,
            default = None, cast: Callable | None = None,
            required: bool = False, public: str | bool = False,
            **kwargs
        ):
        self._srcs = dict()
        if src:
            self._srcs['default'] = src
        self._default = default
        self._cast = cast
        self._required = required
        self._pub_name = public
        self._val = MISSING
        for k, v in kwargs.items():
            if k.endswith('_src'):
                self._srcs[k[:-4]] = v
            else:
                raise TypeError(f'unknown keyword argument {k!r}')

    def __set_name__(self, owner, name: str, *, src_name: str | None = None):
        self._name = name
        self._srcs.setdefault('default', src_name or name)
        nsrcs = dict()
        for k, v in self._srcs.items():
            if v.endswith('?'):
                nsrcs[k] = v.rstrip('?') + (src_name or name)
        self._srcs.update(nsrcs)
        if self._pub_name is True:
            self._pub_name = name
    def __get__(self, obj: Any, owner = None):
        if self._val is MISSING:
            v = MISSING
            for srckey, src in self._srcs.items():
                if (getter := self._getter(obj, srckey)):
                    v = getter.get(src, v)
                    if _not_missing(v):
                        if srckey != 'default':
                            logger.info(f'value {self._name} found in {srckey} source')
                        break
            if not _not_missing(v):
                if self._required:
                    raise self._not_found(f'required config {self._srcs['default']} not set!')
                else:
                    v = self._default
            if callable(self._cast):
                v = self._cast(v) if v is not None else self._cast()
            self._val = v
        return self._val

    @abstractmethod
    def _getter(self, obj: Any, name: str = 'default') -> ValueSource:
        pass

    @property
    def name(self):
        return self._name

    @property
    def source(self, /):
        return self._srcs['default']

__all__ = ('Wanted', 'Incomplete', 'ValueSource', 'ValueProperty')
__all__ = (
    'Wanted', 'Incomplete'
)
@ -22,7 +22,7 @@ import math
|
|||
import re
|
||||
from typing import Any, Callable
|
||||
|
||||
from .bits import mod_ceil, split_bits, join_bits
|
||||
from .bits import split_bits, join_bits
|
||||
from .functools import deprecated
|
||||
|
||||
# yes, I know ItsDangerous implements that as well, but remember
|
||||
|
|
@ -49,26 +49,6 @@ def want_str(s: str | bytes, encoding: str = "utf-8", errors: str = "strict") ->
|
|||
s = s.decode(encoding, errors)
|
||||
return s
|
||||
|
||||
|
||||
BASE64_TO_URLSAFE = str.maketrans('+/', '-_', ' ')
|
||||
|
||||
def want_urlsafe(s: str | bytes) -> str:
|
||||
"""
|
||||
Force a Base64 string into its urlsafe representation.
|
||||
|
||||
Behavior is unchecked and undefined for anything other than Base64 strings.
|
||||
In particular, this is NOT a URL encoder.
|
||||
|
||||
Used by b64encode() and b64decode().
|
||||
"""
|
||||
return want_str(s).translate(BASE64_TO_URLSAFE)
|
||||
|
||||
def want_urlsafe_bytes(s: str | bytes) -> bytes:
|
||||
"""
|
||||
Shorthand for want_bytes(want_urlsafe(s)).
|
||||
"""
|
||||
return want_bytes(want_urlsafe(s))
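A rough usage sketch, meant to run next to the definitions above; the outputs follow directly from the BASE64_TO_URLSAFE table ('+' and '/' are translated, spaces dropped, '=' kept):

# illustrative only
assert want_urlsafe('a+b/c==') == 'a-b_c=='
assert want_urlsafe_bytes('a+b/c==') == b'a-b_c=='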
|
||||
|
||||
B32_TO_CROCKFORD = str.maketrans(
|
||||
'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
|
||||
'0123456789ABCDEFGHJKMNPQRSTVWXYZ',
|
||||
|
|
@ -79,7 +59,6 @@ CROCKFORD_TO_B32 = str.maketrans(
|
|||
'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
|
||||
'=')
|
||||
|
||||
|
||||
BIP39_WORD_LIST = """
|
||||
abandon ability able about above absent absorb abstract absurd abuse access accident account accuse achieve acid acoustic acquire across act action
|
||||
actor actress actual adapt add addict address adjust admit adult advance advice aerobic affair afford afraid again age agent agree ahead aim air airport
|
||||
|
|
@ -179,13 +158,7 @@ def cb32encode(val: bytes) -> str:
|
|||
'''
|
||||
return want_str(base64.b32encode(val)).translate(B32_TO_CROCKFORD)
|
||||
|
||||
def cb32lencode(val: bytes) -> str:
|
||||
'''
|
||||
Encode bytes in Crockford Base32, lowercased.
|
||||
'''
|
||||
return want_str(base64.b32encode(val)).translate(B32_TO_CROCKFORD).lower()
|
||||
|
||||
def cb32decode(val: bytes | str) -> bytes:
|
||||
def cb32decode(val: bytes | str) -> str:
|
||||
'''
|
||||
Decode bytes from Crockford Base32.
|
||||
'''
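A hedged round-trip sketch, assuming cb32decode() reverses cb32encode() as its docstring says (b'hello' is 5 bytes, so no Base32 padding is involved):

encoded = cb32encode(b'hello')               # Crockford Base32, uppercase
assert cb32lencode(b'hello') == encoded.lower()
assert cb32decode(encoded) == b'hello'       # round trip back to bytes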
|
||||
|
|
@ -205,53 +178,16 @@ def b32ldecode(val: bytes | str) -> bytes:
|
|||
|
||||
def b64encode(val: bytes, *, strip: bool = True) -> str:
|
||||
'''
|
||||
Wrapper around base64.urlsafe_b64encode() which also strips trailing '='.
|
||||
Wrapper around base64.urlsafe_b64encode() which also strips trailing '=' and leading 'A'.
|
||||
'''
|
||||
b = want_str(base64.urlsafe_b64encode(val))
|
||||
return b.rstrip('=') if strip else b
|
||||
return b.lstrip('A').rstrip('=') if strip else b
|
||||
|
||||
def b64decode(val: bytes | str) -> bytes:
|
||||
'''
|
||||
Wrapper around base64.urlsafe_b64decode() which deals with padding.
|
||||
'''
|
||||
val = want_urlsafe(val)
|
||||
return base64.urlsafe_b64decode(val.ljust(mod_ceil(len(val), 4), '='))
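A small sketch of the padding round trip (the value is chosen so the leading-'A' stripping added above does not come into play):

token = b64encode(b'suou')          # 'c3VvdQ' -- trailing '=' stripped
assert '=' not in token
assert b64decode(token) == b'suou'  # padding is restored before decoding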
|
||||
|
||||
def rb64encode(val: bytes, *, strip: bool = True) -> str:
|
||||
'''
|
||||
Call base64.urlsafe_b64encode() after padding the start with null bytes ('\\0'). Leading 'A' characters are stripped from the result.
|
||||
'''
|
||||
b = want_str(base64.urlsafe_b64encode(val.rjust(mod_ceil(len(val), 3), b'\0')))
|
||||
return b.lstrip('A') if strip else b
|
||||
|
||||
def rb64decode(val: bytes | str) -> bytes:
|
||||
'''
|
||||
Wrapper around base64.urlsafe_b64decode() which deals with padding.
|
||||
'''
|
||||
val = want_urlsafe(val)
|
||||
return base64.urlsafe_b64decode(val.rjust(mod_ceil(len(val), 4), 'A'))
|
||||
|
||||
|
||||
B85_TO_Z85 = str.maketrans(
|
||||
'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~',
|
||||
'0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.-:+=^!/*?&<>()[]{}@%$#'
|
||||
)
|
||||
Z85_TO_B85 = str.maketrans(
|
||||
'0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.-:+=^!/*?&<>()[]{}@%$#',
|
||||
'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~'
|
||||
)
|
||||
|
||||
if hasattr(base64, 'z85encode'):
|
||||
# Python >=3.13
|
||||
def z85encode(val: bytes) -> str:
|
||||
return want_str(base64.z85encode(val))
|
||||
z85decode = base64.z85decode
|
||||
else:
|
||||
# Python <=3.12
|
||||
def z85encode(val: bytes) -> str:
|
||||
return want_str(base64.b85encode(val)).translate(B85_TO_Z85)
|
||||
def z85decode(val: bytes | str) -> bytes:
|
||||
return base64.b85decode(want_str(val).translate(Z85_TO_B85))
|
||||
return base64.urlsafe_b64decode(want_bytes(val).replace(b'/', b'_').replace(b'+', b'-') + b'=' * ((4 - len(val) % 4) % 4))
|
||||
|
||||
def b2048encode(val: bytes) -> str:
|
||||
'''
|
||||
|
|
@ -322,22 +258,6 @@ def ssv_list(s: str, *, sep_chars = ',;') -> list[str]:
|
|||
l.pop()
|
||||
return l
|
||||
|
||||
def twocolon_list(s: str | None) -> list[str]:
|
||||
"""
|
||||
Parse a string on a single line as multiple lines, each line separated by double colon (::).
|
||||
|
||||
Returns a list.
|
||||
"""
|
||||
if not s:
|
||||
return []
|
||||
return [x.strip() for x in s.split('::')]
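A quick check of the parsing rule above:

assert twocolon_list('alpha :: beta::gamma') == ['alpha', 'beta', 'gamma']
assert twocolon_list(None) == []   # empty input yields an empty list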
|
||||
|
||||
def quote_css_string(s):
|
||||
"""Quotes a string as CSS string literal.
|
||||
|
||||
Source: libsass==0.23.0"""
|
||||
return "'" + ''.join(('\\%06x' % ord(c)) for c in s) + "'"
|
||||
|
||||
class StringCase(enum.Enum):
|
||||
"""
|
||||
Enum values used by regex validators and storage converters.
|
||||
|
|
@ -373,6 +293,5 @@ class StringCase(enum.Enum):
|
|||
|
||||
__all__ = (
|
||||
'cb32encode', 'cb32decode', 'b32lencode', 'b32ldecode', 'b64encode', 'b64decode', 'jsonencode',
|
||||
'StringCase', 'want_bytes', 'want_str', 'jsondecode', 'ssv_list', 'twocolon_list', 'want_urlsafe', 'want_urlsafe_bytes',
|
||||
'z85encode', 'z85decode'
|
||||
'StringCase', 'want_bytes', 'want_str', 'jsondecode', 'ssv_list'
|
||||
)
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
"""
|
||||
Miscellaneous iterables
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
|
||||
from __future__ import annotations
|
||||
import time
|
||||
from typing import Iterator, TypeVar
|
||||
|
||||
|
||||
_KT = TypeVar('_KT')
|
||||
_VT = TypeVar('_VT')
|
||||
|
||||
class TimedDict(dict[_KT, _VT]):
|
||||
"""
|
||||
Dictionary where keys expire after the defined time to live, expressed in seconds.
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
_expires: dict[_KT, int]
|
||||
_ttl: int
|
||||
|
||||
def __init__(self, ttl: int, /, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._ttl = ttl
|
||||
self._expires = dict()
|
||||
|
||||
def check_ex(self, key: _KT):
|
||||
if super().__contains__(key):
|
||||
ex = self._expires[key]
|
||||
now = int(time.time())
|
||||
if ex < now:
|
||||
del self._expires[key]
|
||||
super().__delitem__(key)
|
||||
elif key in self._expires:
|
||||
del self._expires[key]
|
||||
|
||||
def __getitem__(self, key: _KT, /) -> _VT:
|
||||
self.check_ex(key)
|
||||
return super().__getitem__(key)
|
||||
|
||||
def get(self, key: _KT, default: _VT | None = None, /) -> _VT | None:
|
||||
self.check_ex(key)
|
||||
return super().get(key, default)
|
||||
|
||||
def __setitem__(self, key: _KT, value: _VT, /) -> None:
|
||||
self._expires[key] = int(time.time() + self._ttl)
|
||||
super().__setitem__(key, value)
|
||||
|
||||
def setdefault(self, key: _KT, default: _VT, /) -> _VT:
|
||||
self.check_ex(key)
|
||||
self._expires[key] = int(time.time() + self._ttl)
|
||||
return super().setdefault(key, default)
|
||||
|
||||
def __delitem__(self, key: _KT, /) -> None:
|
||||
del self._expires[key]
|
||||
super().__delitem__(key)
|
||||
|
||||
def __iter__(self) -> Iterator[_KT]:
|
||||
for k in list(super().__iter__()):
|
||||
self.check_ex(k)
|
||||
return super().__iter__()
|
||||
|
||||
__all__ = ('TimedDict',)
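A hedged usage sketch, assuming the per-key expiry bookkeeping as corrected above:

import time

cache = TimedDict(2)                  # keys live for 2 seconds
cache['token'] = 'abc123'
assert cache.get('token') == 'abc123'
time.sleep(3)
assert cache.get('token') is None     # expired entries are evicted on access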
|
||||
|
|
@ -1,307 +0,0 @@
|
|||
"""
|
||||
Colors for coding artists
|
||||
|
||||
*New in 0.7.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import namedtuple
|
||||
from functools import lru_cache
|
||||
import math
|
||||
|
||||
from suou.mat import Matrix
|
||||
|
||||
|
||||
class Chalk:
|
||||
"""
|
||||
ANSI escape codes for terminal colors, similar to JavaScript's `chalk` library.
|
||||
|
||||
Best used with Python 3.12+ that allows arbitrary nesting of f-strings.
|
||||
|
||||
Yes, I am aware colorama exists.
|
||||
|
||||
UNTESTED
|
||||
|
||||
*New in 0.7.0*
|
||||
"""
|
||||
CSI = '\x1b['
|
||||
RED = CSI + "31m"
|
||||
GREEN = CSI + "32m"
|
||||
YELLOW = CSI + "33m"
|
||||
BLUE = CSI + "34m"
|
||||
CYAN = CSI + "36m"
|
||||
PURPLE = CSI + "35m"
|
||||
GREY = CSI + "90m"
|
||||
END_COLOR = CSI + "39m"
|
||||
BOLD = CSI + "1m"
|
||||
END_BOLD = CSI + "22m"
|
||||
FAINT = CSI + "2m"
|
||||
def __init__(self, flags = (), ends = ()):
|
||||
self._flags = tuple(flags)
|
||||
self._ends = tuple(ends)
|
||||
@lru_cache()
|
||||
def _wrap(self, beg, end):
|
||||
return Chalk(self._flags + (beg,), self._ends + (end,))
|
||||
def __call__(self, s: str) -> str:
|
||||
return ''.join(self._flags) + s + ''.join(reversed(self._ends))
|
||||
@property
|
||||
def red(self):
|
||||
return self._wrap(self.RED, self.END_COLOR)
|
||||
@property
|
||||
def green(self):
|
||||
return self._wrap(self.GREEN, self.END_COLOR)
|
||||
@property
|
||||
def blue(self):
|
||||
return self._wrap(self.BLUE, self.END_COLOR)
|
||||
@property
|
||||
def yellow(self):
|
||||
return self._wrap(self.YELLOW, self.END_COLOR)
|
||||
@property
|
||||
def cyan(self):
|
||||
return self._wrap(self.CYAN, self.END_COLOR)
|
||||
@property
|
||||
def purple(self):
|
||||
return self._wrap(self.PURPLE, self.END_COLOR)
|
||||
@property
|
||||
def grey(self):
|
||||
return self._wrap(self.GREY, self.END_COLOR)
|
||||
gray = grey
|
||||
marine = blue
|
||||
magenta = purple
|
||||
@property
|
||||
def bold(self):
|
||||
return self._wrap(self.BOLD, self.END_BOLD)
|
||||
@property
|
||||
def faint(self):
|
||||
return self._wrap(self.FAINT, self.END_BOLD)
|
||||
|
||||
|
||||
## TODO make it lazy / an instance variable?
|
||||
chalk = Chalk()
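A hedged usage sketch (the class is marked UNTESTED above): styles compose by chaining properties and calling the result on a string.

print(chalk.red.bold('error:'), 'something broke')
print(chalk.grey(f"hint: styles nest, e.g. {chalk.cyan('like this')}"))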
|
||||
|
||||
|
||||
## Utilities for web colors
|
||||
|
||||
class RGBColor(namedtuple('_WebColor', 'red green blue')):
|
||||
"""
|
||||
Representation of a color in the RGB TrueColor space.
|
||||
|
||||
Useful for theming.
|
||||
|
||||
*Changed in 0.12.0*: name is now RGBColor, with WebColor being an alias.
|
||||
Added conversions to and from OKLCH, OKLab, sRGB, and XYZ.
|
||||
"""
|
||||
def lighten(self, *, factor = .75):
|
||||
"""
|
||||
Return a whitened shade of the color.
|
||||
Factor lies between 0 and 1: 0 = pure white, 1 = no change. Default is .75
|
||||
"""
|
||||
return WebColor(
|
||||
255 - int((255 - self.red) * factor),
|
||||
255 - int((255 - self.green) * factor),
|
||||
255 - int((255 - self.blue) * factor),
|
||||
)
|
||||
def darken(self, *, factor = .75):
|
||||
"""
|
||||
Return a darkened shade of the color.
|
||||
Factor lies between 0 and 1: 0 = pure black, 1 = no change. Default is .75
|
||||
"""
|
||||
return WebColor(
|
||||
int(self.red * factor),
|
||||
int(self.green * factor),
|
||||
int(self.blue * factor)
|
||||
)
|
||||
def greyen(self, *, factor = .75):
|
||||
"""
|
||||
Return a desaturated shade of the color.
|
||||
Factor lies between 0 and 1: 0 = gray, 1 = no change. Default is .75
|
||||
"""
|
||||
return self.darken(factor=factor) + self.lighten(factor=factor)
|
||||
|
||||
def blend_with(self, other: RGBColor):
|
||||
"""
|
||||
Mix two colors, returning the average.
|
||||
"""
|
||||
return RGBColor (
|
||||
(self.red + other.red) // 2,
|
||||
(self.green + other.green) // 2,
|
||||
(self.blue + other.blue) // 2
|
||||
)
|
||||
|
||||
def to_srgb(self):
|
||||
"""
|
||||
Convert to sRGB space.
|
||||
|
||||
*New in 0.12.0*
|
||||
"""
|
||||
return SRGBColor(*(
|
||||
(i / 12.92 if abs(i) <= 0.04045 else
|
||||
(-1 if i < 0 else 1) * ((abs(i) + 0.055) / 1.055) ** 2.4) for i in self
|
||||
))
|
||||
|
||||
def to_oklab(self):
|
||||
return self.to_xyz().to_oklab()
|
||||
|
||||
__add__ = blend_with
|
||||
|
||||
def __str__(self):
|
||||
return f"rgb({self.red}, {self.green}, {self.blue})"
|
||||
|
||||
RGB_TO_XYZ = Matrix([
|
||||
[0.41239079926595934, 0.357584339383878, 0.1804807884018343],
|
||||
[0.21263900587151027, 0.715168678767756, 0.07219231536073371],
|
||||
[0.01933081871559182, 0.11919477979462598, 0.9505321522496607]
|
||||
])
|
||||
|
||||
def to_xyz(self):
|
||||
return XYZColor(*(self.RGB_TO_XYZ @ Matrix.as_column(self)).get_column())
|
||||
|
||||
def to_oklch(self):
|
||||
return self.to_xyz().to_oklch()
|
||||
|
||||
def to_oklab(self):
|
||||
return self.to_xyz().to_oklab()
|
||||
|
||||
WebColor = RGBColor
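A short sketch of the shading helpers on a TrueColor value (printed values are indicative only):

accent = RGBColor(0x33, 0x66, 0x99)
print(accent)                             # rgb(51, 102, 153)
print(accent.lighten(factor=0.5))         # pushed halfway towards white
print(accent.darken(factor=0.5))          # pushed halfway towards black
print(accent + RGBColor(255, 255, 255))   # blend_with(): channel-wise average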
|
||||
|
||||
## The following have been adapted from
|
||||
## https://gist.github.com/dkaraush/65d19d61396f5f3cd8ba7d1b4b3c9432
|
||||
|
||||
class SRGBColor(namedtuple('_SRGBColor', 'red green blue')):
|
||||
"""
|
||||
Represent a color in the sRGB space.
|
||||
|
||||
*New in 0.12.0*
|
||||
"""
|
||||
red: float
|
||||
green: float
|
||||
blue: float
|
||||
|
||||
def __str__(self):
|
||||
return f"srgb({self.red}, {self.green}, {self.blue})"
|
||||
|
||||
def to_rgb(self):
|
||||
return RGBColor(*(
|
||||
((-1 if i < 0 else 1) * (1.055 * (abs(i) ** (1/2.4)) - 0.055)
|
||||
if abs(i) > 0.0031308 else 12.92 * i) for i in self))
|
||||
|
||||
def to_xyz(self):
|
||||
return self.to_rgb().to_xyz()
|
||||
|
||||
def to_oklab(self):
|
||||
return self.to_rgb().to_oklab()
|
||||
|
||||
|
||||
class XYZColor(namedtuple('_XYZColor', 'x y z')):
|
||||
"""
|
||||
Represent a color in the XYZ color space.
|
||||
|
||||
*New in 0.12.0*
|
||||
"""
|
||||
|
||||
XYZ_TO_RGB = Matrix([
|
||||
[ 3.2409699419045226, -1.537383177570094, -0.4986107602930034],
|
||||
[-0.9692436362808796, 1.8759675015077202, 0.04155505740717559],
|
||||
[ 0.05563007969699366, -0.20397695888897652, 1.0569715142428786]
|
||||
])
|
||||
|
||||
XYZ_TO_LMS = Matrix([
|
||||
[0.8190224379967030, 0.3619062600528904, -0.1288737815209879],
|
||||
[0.0329836539323885, 0.9292868615863434, 0.0361446663506424],
|
||||
[0.0481771893596242, 0.2642395317527308, 0.6335478284694309]
|
||||
])
|
||||
|
||||
LMSG_TO_OKLAB = Matrix([
|
||||
[0.2104542683093140, 0.7936177747023054, -0.0040720430116193],
|
||||
[1.9779985324311684, -2.4285922420485799, 0.4505937096174110],
|
||||
[0.0259040424655478, 0.7827717124575296, -0.8086757549230774]
|
||||
])
|
||||
|
||||
def to_rgb(self):
|
||||
return RGBColor(*(self.XYZ_TO_RGB @ Matrix.as_column(self)).get_column())
|
||||
|
||||
def to_oklab(self):
|
||||
lms = (self.XYZ_TO_LMS @ Matrix.as_column(self)).get_column()
|
||||
lmsg = [math.cbrt(i) for i in lms]
|
||||
oklab = (self.LMSG_TO_OKLAB @ Matrix.as_column(lmsg)).get_column()
|
||||
return OKLabColor(*oklab)
|
||||
|
||||
def to_oklch(self):
|
||||
return self.to_oklab().to_oklch()
|
||||
|
||||
|
||||
class OKLabColor(namedtuple('_OKLabColor', 'l a b')):
|
||||
"""
|
||||
Represent a color in the OKLab color space.
|
||||
|
||||
*New in 0.12.0*
|
||||
"""
|
||||
|
||||
OKLAB_TO_LMSG = Matrix([
|
||||
[1., 0.3963377773761749, 0.2158037573099136],
|
||||
[1., -0.1055613458156586, -0.0638541728258133],
|
||||
[1., -0.0894841775298119, -1.2914855480194092]
|
||||
])
|
||||
|
||||
LMS_TO_XYZ = Matrix([
|
||||
[ 1.2268798758459243, -0.5578149944602171, 0.2813910456659647],
|
||||
[-0.0405757452148008, 1.1122868032803170, -0.0717110580655164],
|
||||
[-0.0763729366746601, -0.4214933324022432, 1.5869240198367816]
|
||||
])
|
||||
|
||||
def to_xyz(self):
|
||||
lmsg = (self.OKLAB_TO_LMSG @ Matrix.as_column(self)).get_column()
|
||||
lms = [i ** 3 for i in lmsg]
|
||||
xyz = (self.LMS_TO_XYZ @ Matrix.as_column(lms)).get_column()
|
||||
return XYZColor(*xyz)
|
||||
|
||||
def to_oklch(self):
|
||||
return OKLCHColor(
|
||||
self.l,
|
||||
math.sqrt(self.a ** 2 + self.b ** 2),
|
||||
0 if abs(self.a) < .0002 and abs(self.b) < .0002 else (((math.atan2(self.b, self.a) * 180) / math.pi % 360) + 360) % 360
|
||||
)
|
||||
|
||||
def to_rgb(self):
|
||||
return self.to_xyz().to_rgb()
|
||||
|
||||
class OKLCHColor(namedtuple('_OKLCHColor', 'l c h')):
|
||||
"""
|
||||
Represent a color in the OKLCH color space.
|
||||
|
||||
*Warning*: conversion to RGB is not bound checked yet!
|
||||
|
||||
*New in 0.12.0*
|
||||
"""
|
||||
|
||||
def __str__(self):
|
||||
l, c, h = round(self.l, 4), round(self.c, 4), round(self.h, 4)
|
||||
|
||||
return f'oklch({l}, {c}, {h})'
|
||||
|
||||
|
||||
def to_oklab(self):
|
||||
return OKLabColor(
|
||||
self.l,
|
||||
self.c * math.cos(self.h * math.pi / 180),
|
||||
self.c * math.sin(self.h * math.pi / 180)
|
||||
)
|
||||
|
||||
def to_rgb(self):
|
||||
return self.to_oklab().to_rgb()
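A hedged sketch of the conversion chain (RGB -> XYZ -> OKLab -> OKLCH); it assumes the channel scaling expected by RGB_TO_XYZ above and the to_oklab() fix noted earlier:

teal = RGBColor(0, 128, 128)
lch = teal.to_oklch()
print(lch)             # oklch(L, C, H), rounded to 4 decimals by __str__
print(lch.to_oklab())  # back to OKLab; to_rgb() completes the round trip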
|
||||
|
||||
__all__ = ('chalk', 'WebColor', 'RGBColor', 'SRGBColor', 'XYZColor', 'OKLabColor', 'OKLCHColor')
|
||||
|
|
@ -15,29 +15,41 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from typing import TypeVar
|
||||
from collections.abc import Mapping
|
||||
from configparser import ConfigParser as _ConfigParser
|
||||
import os
|
||||
from typing import Any, Callable, Iterator, override
|
||||
from typing import Any, Callable, Iterator
|
||||
from collections import OrderedDict
|
||||
|
||||
from argparse import Namespace
|
||||
|
||||
from .classtools import ValueSource, ValueProperty
|
||||
from .functools import deprecated
|
||||
from .exceptions import MissingConfigError, MissingConfigWarning
|
||||
|
||||
from .functools import deprecated_alias
|
||||
|
||||
|
||||
MISSING = object()
|
||||
_T = TypeVar('_T')
|
||||
|
||||
|
||||
class MissingConfigError(LookupError):
|
||||
"""
|
||||
Config variable not found.
|
||||
|
||||
class ConfigSource(ValueSource):
|
||||
Raised when a config property is marked as required, but no property with
|
||||
that name is found.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class MissingConfigWarning(MissingConfigError, Warning):
|
||||
"""
|
||||
A required config property is missing, and the application is assuming a default value.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class ConfigSource(Mapping):
|
||||
'''
|
||||
Abstract config value source.
|
||||
Abstract config source.
|
||||
'''
|
||||
__slots__ = ()
|
||||
|
||||
|
|
@ -107,30 +119,7 @@ class DictConfigSource(ConfigSource):
|
|||
def __len__(self) -> int:
|
||||
return len(self._d)
|
||||
|
||||
class ArgConfigSource(ValueSource):
|
||||
"""
|
||||
Config source that assumes arguments have already been parsed.
|
||||
|
||||
*New in 0.6.0*
|
||||
"""
|
||||
_ns: Namespace
|
||||
def __init__(self, ns: Namespace):
|
||||
super().__init__()
|
||||
self._ns = ns
|
||||
def __getitem__(self, key):
|
||||
return getattr(self._ns, key)
|
||||
def get(self, key, value = None):
|
||||
return getattr(self._ns, key, value)
|
||||
def __contains__(self, key: str, /) -> bool:
|
||||
return hasattr(self._ns, key)
|
||||
@deprecated('Here for Mapping() implementation. Untested and unused')
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
yield from self._ns._get_args()
|
||||
@deprecated('Here for Mapping() implementation. Untested and unused')
|
||||
def __len__(self) -> int:
|
||||
return len(self._ns._get_args())
|
||||
|
||||
class ConfigValue(ValueProperty):
|
||||
class ConfigValue:
|
||||
"""
|
||||
A single config property.
|
||||
|
||||
|
|
@ -146,43 +135,63 @@ class ConfigValue(ValueProperty):
|
|||
- preserve_case: if True, src is not CAPITALIZED. Useful for parsing from Python dictionaries or ConfigParser's
|
||||
- required: throw an error if empty or not supplied
|
||||
"""
|
||||
|
||||
_preserve_case: bool = False
|
||||
_prefix: str | None = None
|
||||
_not_found = MissingConfigError
|
||||
# XXX disabled for https://stackoverflow.com/questions/45864273/slots-conflicts-with-a-class-variable-in-a-generic-class
|
||||
#__slots__ = ('_srcs', '_val', '_default', '_cast', '_required', '_preserve_case')
|
||||
|
||||
_srcs: dict[str, str] | None
|
||||
_preserve_case: bool = False
|
||||
_val: Any | MISSING = MISSING
|
||||
_default: Any | None
|
||||
_cast: Callable | None
|
||||
_required: bool
|
||||
_pub_name: str | bool = False
|
||||
def __init__(self, /,
|
||||
src: str | None = None, *, default = None, cast: Callable | None = None,
|
||||
required: bool = False, preserve_case: bool = False, prefix: str | None = None,
|
||||
public: str | bool = False, **kwargs):
|
||||
self._srcs = dict()
|
||||
self._preserve_case = preserve_case
|
||||
if src and not preserve_case:
|
||||
src = src.upper()
|
||||
if not src and prefix:
|
||||
self._prefix = prefix
|
||||
if not preserve_case:
|
||||
src = f'{prefix.upper()}?'
|
||||
if src:
|
||||
self._srcs['default'] = src if preserve_case else src.upper()
|
||||
elif prefix:
|
||||
self._srcs['default'] = f'{prefix if preserve_case else prefix.upper()}?'
|
||||
self._default = default
|
||||
self._cast = cast
|
||||
self._required = required
|
||||
self._pub_name = public
|
||||
for k, v in kwargs.items():
|
||||
if k.endswith('_src'):
|
||||
self._srcs[k[:-4]] = v
|
||||
else:
|
||||
src = f'{prefix}?'
|
||||
|
||||
super().__init__(src, default=default, cast=cast,
|
||||
required=required, public=public, **kwargs
|
||||
)
|
||||
|
||||
raise TypeError(f'unknown keyword argument {k!r}')
|
||||
def __set_name__(self, owner, name: str):
|
||||
src_name = name if self._preserve_case else name.upper()
|
||||
|
||||
super().__set_name__(owner, name, src_name=src_name)
|
||||
|
||||
if 'default' not in self._srcs:
|
||||
self._srcs['default'] = name if self._preserve_case else name.upper()
|
||||
elif self._srcs['default'].endswith('?'):
|
||||
self._srcs['default'] = self._srcs['default'].rstrip('?') + (name if self._preserve_case else name.upper() )
|
||||
|
||||
if self._pub_name is True:
|
||||
self._pub_name = name
|
||||
if self._pub_name and isinstance(owner, ConfigOptions):
|
||||
owner.expose(self._pub_name, name)
|
||||
|
||||
def __get__(self, obj: ConfigOptions, owner=False):
|
||||
if self._val is MISSING:
|
||||
v = MISSING
|
||||
for srckey, src in obj._srcs.items():
|
||||
if srckey in self._srcs:
|
||||
v = src.get(self._srcs[srckey], v)
|
||||
if self._required and (not v or v is MISSING):
|
||||
raise MissingConfigError(f'required config {self._srcs['default']} not set!')
|
||||
if v is MISSING:
|
||||
v = self._default
|
||||
if callable(self._cast):
|
||||
v = self._cast(v) if v is not None else self._cast()
|
||||
self._val = v
|
||||
return self._val
|
||||
|
||||
@override
|
||||
def _getter(self, obj: ConfigOptions, name: str = 'default') -> ConfigSource:
|
||||
if not isinstance(obj._srcs, Mapping):
|
||||
raise RuntimeError('attempt to get config value with no source configured')
|
||||
return obj._srcs.get(name)
|
||||
@property
|
||||
def source(self, /):
|
||||
return self._srcs['default']
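A hedged sketch of a declarative options class; the exact ConfigOptions()/EnvConfigSource() wiring is assumed rather than shown in this hunk:

class AppConfig(ConfigOptions):
    database_url = ConfigValue(required=True)          # looks up DATABASE_URL
    listen_port = ConfigValue(default=8080, cast=int)  # looks up LISTEN_PORT
    debug = ConfigValue('APP_DEBUG', default=False, cast=bool)

config = AppConfig()
config.add_source(EnvConfigSource())
print(config.listen_port)   # 8080 unless LISTEN_PORT is set in the environment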
|
||||
|
||||
|
||||
class ConfigOptions:
|
||||
|
|
@ -211,7 +220,7 @@ class ConfigOptions:
|
|||
if first:
|
||||
self._srcs.move_to_end(key, False)
|
||||
|
||||
add_config_source = deprecated('use add_source() instead')(add_source)
|
||||
add_config_source = deprecated_alias(add_source)
|
||||
|
||||
def expose(self, public_name: str, attr_name: str | None = None) -> None:
|
||||
'''
|
||||
|
|
@ -230,8 +239,7 @@ class ConfigOptions:
|
|||
|
||||
|
||||
__all__ = (
|
||||
'MissingConfigError', 'MissingConfigWarning', 'ConfigOptions', 'EnvConfigSource', 'ConfigParserConfigSource', 'DictConfigSource', 'ConfigSource', 'ConfigValue',
|
||||
'ArgConfigSource'
|
||||
'MissingConfigError', 'MissingConfigWarning', 'ConfigOptions', 'EnvConfigSource', 'ConfigParserConfigSource', 'DictConfigSource', 'ConfigSource', 'ConfigValue'
|
||||
)
|
||||
|
||||
|
||||
|
|
|
|||
140 src/suou/dei.py
|
|
@ -1,140 +0,0 @@
|
|||
"""
|
||||
Utilities for Diversity, Equity, Inclusion.
|
||||
|
||||
This implements a cool compact representation for pronouns, inspired by the one in use at <https://pronoundb.org/>
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
from __future__ import annotations
|
||||
from functools import wraps
|
||||
from typing import Callable, TypeVar
|
||||
|
||||
_T = TypeVar('_T')
|
||||
_U = TypeVar('_U')
|
||||
|
||||
|
||||
BRICKS = '@abcdefghijklmnopqrstuvwxyz+?-\'/'
|
||||
"""
|
||||
Legend:
|
||||
a through z, ' (apostrophe) and - (hyphen/dash) mean what they mean.
|
||||
? is an unknown symbol or non-ASCII/non-alphabetic character.
|
||||
+ is a suffix separator (like / but allows for a more compact notation).
|
||||
/ is the separator.
|
||||
|
||||
Except for the presets (see Pronoun.PRESETS below), pronouns expand to the
|
||||
given notation: e.g. ae+r is ae/aer.
|
||||
"""
|
||||
|
||||
class Pronoun(int):
|
||||
"""
|
||||
Implementation of pronouns in a compact style.
|
||||
A pronoun is first normalized, then further compressed by turning it
|
||||
into an integer (see Pronoun.from_short()).
|
||||
|
||||
Subclass of int, ideal for databases. Short form is recommended in
|
||||
transfer (e.g. when writing a REST API).
|
||||
"""
|
||||
PRESETS = {
|
||||
'hh': 'he/him',
|
||||
'sh': 'she/her',
|
||||
'tt': 'they/them',
|
||||
'ii': 'it/its',
|
||||
'hs': 'he/she',
|
||||
'ht': 'he/they',
|
||||
'hi': 'he/it',
|
||||
'shh': 'she/he',
|
||||
'st': 'she/they',
|
||||
'si': 'she/it',
|
||||
'th': 'they/he',
|
||||
'ts': 'they/she',
|
||||
'ti': 'they/it',
|
||||
}
|
||||
|
||||
UNSPECIFIED = 0
|
||||
|
||||
## presets from PronounDB
|
||||
## DO NOT TOUCH the values unless you know their exact correspondence!!
|
||||
## hint: Pronoun.from_short()
|
||||
HE = HE_HIM = 264
|
||||
SHE = SHE_HER = 275
|
||||
THEY = THEY_THEM = 660
|
||||
IT = IT_ITS = 297
|
||||
HE_SHE = 616
|
||||
HE_THEY = 648
|
||||
HE_IT = 296
|
||||
SHE_HE = 8467
|
||||
SHE_THEY = 657
|
||||
SHE_IT = 307
|
||||
THEY_HE = 276
|
||||
THEY_SHE = 628
|
||||
THEY_IT = 308
|
||||
ANY = 26049
|
||||
OTHER = 19047055
|
||||
ASK = 11873
|
||||
AVOID = NAME_ONLY = 4505281
|
||||
|
||||
def short(self) -> str:
|
||||
i = self
|
||||
s = ''
|
||||
while i > 0:
|
||||
s += BRICKS[i % 32]
|
||||
i >>= 5
|
||||
return s
|
||||
|
||||
def full(self):
|
||||
s = self.short()
|
||||
|
||||
if s in self.PRESETS:
|
||||
return self.PRESETS[s]
|
||||
|
||||
if '+' in s:
|
||||
s1, s2 = s.rsplit('+')
|
||||
s = s1 + '/' + s1 + s2
|
||||
|
||||
return s
|
||||
__str__ = full
|
||||
|
||||
@classmethod
|
||||
def from_short(cls, s: str) -> Pronoun:
|
||||
i = 0
|
||||
for j, ch in enumerate(s):
|
||||
i += BRICKS.index(ch) << (5 * j)
|
||||
return Pronoun(i)
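A worked sketch of the round trip described above (values follow from the BRICKS table):

assert Pronoun.from_short('sh') == Pronoun.SHE_HER    # 's'=19, 'h'=8 -> 19 + (8 << 5) = 275
assert Pronoun.from_short('sh').full() == 'she/her'   # preset lookup
assert Pronoun.from_short('ae+r').full() == 'ae/aer'  # '+' expands to a suffix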
|
||||
|
||||
|
||||
|
||||
def dei_args(**renames):
|
||||
"""
|
||||
Allow for aliases in the keyword argument names, in form alias='real_name'.
|
||||
|
||||
DEI utility for those programmers who don't want to have to do with
|
||||
potentially offensive variable naming.
|
||||
|
||||
Dear conservatives, this does not influence the ability to call the wrapped function
|
||||
with the original parameter names.
|
||||
"""
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
for alias_name, actual_name in renames.items():
|
||||
if alias_name in kwargs:
|
||||
val = kwargs.pop(alias_name)
|
||||
kwargs[actual_name] = val
|
||||
|
||||
return func(*args, **kwargs)
|
||||
return wrapper
|
||||
return decorator
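A quick sketch: callers may use either the alias or the original parameter name.

@dei_args(primary='master', replica='slave')
def connect(master: str, slave: str) -> tuple:
    return master, slave

assert connect(primary='db1', replica='db2') == ('db1', 'db2')
assert connect(master='db1', slave='db2') == ('db1', 'db2')   # original names still accepted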
|
||||
|
||||
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
"""
|
||||
Web app hardening and PT utilities.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
SENSITIVE_ENDPOINTS = """
|
||||
/.git
|
||||
/.gitignore
|
||||
/node_modules
|
||||
/wp-admin
|
||||
/wp-login.php
|
||||
/.ht
|
||||
/package.json
|
||||
/package-lock.json
|
||||
/composer.
|
||||
/docker-compose.
|
||||
/config/
|
||||
/config.
|
||||
/secrets.
|
||||
/credentials.
|
||||
/.idea/
|
||||
/.vscode/
|
||||
/storage/
|
||||
/logs/
|
||||
/.DS_Store
|
||||
/backup
|
||||
/.backup
|
||||
/db.sql
|
||||
/database.sql
|
||||
/.vite
|
||||
""".split()
|
||||
|
||||
|
|
@ -1,103 +0,0 @@
|
|||
"""
|
||||
Exceptions and throwables for all purposes!
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
class PoliticalError(Exception):
|
||||
"""
|
||||
Base class for anything whose execution is refused for political reasons.
|
||||
"""
|
||||
|
||||
class PoliticalWarning(PoliticalError, Warning):
|
||||
"""
|
||||
Base class for politically suspicious behaviors.
|
||||
"""
|
||||
|
||||
class MissingConfigError(LookupError):
|
||||
"""
|
||||
Config variable not found.
|
||||
|
||||
Raised when a config property is marked as required, but no property with
|
||||
that name is found.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class MissingConfigWarning(MissingConfigError, Warning):
|
||||
"""
|
||||
A required config property is missing, and the application is assuming a default value.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class LexError(SyntaxError):
|
||||
"""
|
||||
Illegal character or sequence found in the token stream.
|
||||
"""
|
||||
|
||||
class InconsistencyError(RuntimeError):
|
||||
"""
|
||||
This program is in a state which it's not supposed to be in.
|
||||
"""
|
||||
|
||||
class NotFoundError(LookupError):
|
||||
"""
|
||||
The requested item was not found.
|
||||
"""
|
||||
# Werkzeug et al.
|
||||
code = 404
|
||||
|
||||
class BabelTowerError(NotFoundError):
|
||||
"""
|
||||
The user requested a language that cannot be understood.
|
||||
"""
|
||||
|
||||
class BadLuckError(Exception):
|
||||
"""
|
||||
Stuff did not go as expected.
|
||||
|
||||
Raised by @lucky decorator.
|
||||
"""
|
||||
|
||||
class TerminalRequiredError(OSError):
|
||||
"""
|
||||
Raised by terminal_required() decorator when a function is called from a non-interactive environment.
|
||||
"""
|
||||
|
||||
class BrokenStringsError(OSError):
|
||||
"""
|
||||
Issues related to audio happened, i.e. appropriate executables/libraries/drivers are not installed.
|
||||
"""
|
||||
|
||||
class Fahrenheit451Error(PoliticalError):
|
||||
"""
|
||||
Base class for thought crimes related to arts (e.g. writing, visual arts, music)
|
||||
"""
|
||||
|
||||
# Werkzeug
|
||||
code = 451
|
||||
|
||||
class FuckAroundFindOutError(PoliticalError):
|
||||
"""
|
||||
Raised when there are no actual grounds to raise an exception, but you did something in the past to deserve this outcome.
|
||||
|
||||
Ideal for permanent service bans or similar.
|
||||
"""
|
||||
|
||||
__all__ = (
|
||||
'MissingConfigError', 'MissingConfigWarning', 'LexError', 'InconsistencyError', 'NotFoundError',
|
||||
'TerminalRequiredError', 'PoliticalError', 'PoliticalWarning', 'Fahrenheit451Error', 'FuckAroundFindOutError',
|
||||
'BrokenStringsError', 'BadLuckError'
|
||||
)
|
||||
|
|
@ -15,12 +15,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
"""
|
||||
|
||||
from typing import Any
|
||||
from flask import Flask, abort, current_app, g, request
|
||||
|
||||
from suou.http import WantsContentType
|
||||
from flask import Flask, current_app, g, request
|
||||
from .i18n import I18n
|
||||
from .configparse import ConfigOptions
|
||||
from .dorks import SENSITIVE_ENDPOINTS
|
||||
|
||||
|
||||
def add_context_from_config(app: Flask, config: ConfigOptions) -> Flask:
|
||||
|
|
@ -69,34 +66,6 @@ def get_flask_conf(key: str, default = None, *, app: Flask | None = None) -> Any
|
|||
app = current_app
|
||||
return app.config.get(key, default)
|
||||
|
||||
def harden(app: Flask):
|
||||
"""
|
||||
Make common "dork" endpoints unavailable
|
||||
|
||||
XXX UNTESTED!
|
||||
"""
|
||||
i = 1
|
||||
for ep in SENSITIVE_ENDPOINTS:
|
||||
@app.route(f'{ep}<path:rest>', endpoint=f'unavailable_{i}')
|
||||
def unavailable(rest):
|
||||
abort(403)
|
||||
i += 1
|
||||
|
||||
return app
|
||||
|
||||
def negotiate() -> WantsContentType:
|
||||
"""
|
||||
Return an appropriate MIME type for the sake of content negotiation.
|
||||
"""
|
||||
if any(request.path.startswith(f'/{p.strip('/')}/') for p in current_app.config.get('REST_PATHS', [])):
|
||||
return WantsContentType.JSON
|
||||
elif request.user_agent.string.startswith('Mozilla/'):
|
||||
return WantsContentType.HTML
|
||||
else:
|
||||
return request.accept_mimetypes.best_match([WantsContentType.PLAIN, WantsContentType.JSON, WantsContentType.HTML])
|
||||
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
__all__ = ('add_context_from_config', 'add_i18n', 'get_flask_conf', 'harden', 'negotiate')
|
||||
__all__ = ('add_context_from_config', 'add_i18n', 'get_flask_conf')
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
|
||||
from typing import Any, Mapping
|
||||
import warnings
|
||||
from flask import Response, current_app, make_response
|
||||
from flask import current_app, Response, make_response
|
||||
from flask_restx import Api as _Api
|
||||
|
||||
from .codecs import jsondecode, jsonencode, want_bytes, want_str
|
||||
|
|
@ -74,5 +74,5 @@ class Api(_Api):
|
|||
super().__init__(*a, **ka)
|
||||
self.representations['application/json'] = output_json
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
|
||||
__all__ = ('Api',)
|
||||
|
|
@ -1,8 +1,6 @@
|
|||
"""
|
||||
Utilities for Flask-SQLAlchemy binding.
|
||||
|
||||
This module has been emptied in 0.12.0 following deprecation removals.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
|
@ -16,6 +14,67 @@ This software is distributed on an "AS IS" BASIS,
|
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
from typing import Any, Callable, Never
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
__all__ = ()
|
||||
from flask import abort, request
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from sqlalchemy.orm import DeclarativeBase, Session
|
||||
|
||||
from .codecs import want_bytes
|
||||
from .sqlalchemy import AuthSrc, require_auth_base
|
||||
|
||||
class FlaskAuthSrc(AuthSrc):
|
||||
'''
|
||||
|
||||
'''
|
||||
db: SQLAlchemy
|
||||
def __init__(self, db: SQLAlchemy):
|
||||
super().__init__()
|
||||
self.db = db
|
||||
def get_session(self) -> Session:
|
||||
return self.db.session
|
||||
def get_token(self):
|
||||
if request.authorization:
|
||||
return request.authorization.token
|
||||
def get_signature(self) -> bytes:
|
||||
sig = request.headers.get('authorization-signature', None)
|
||||
return want_bytes(sig) if sig else None
|
||||
def invalid_exc(self, msg: str = 'Validation failed') -> Never:
|
||||
abort(400, msg)
|
||||
def required_exc(self):
|
||||
abort(401, 'Login required')
|
||||
|
||||
def require_auth(cls: type[DeclarativeBase], db: SQLAlchemy) -> Callable:
|
||||
"""
|
||||
Make an auth_required() decorator for Flask views.
|
||||
|
||||
This looks for a token in the Authorization header, validates it, loads the
|
||||
appropriate object, and injects it as the user= parameter.
|
||||
|
||||
NOTE: the actual decorator to be used on routes is **auth_required()**,
|
||||
NOT require_auth() which is the **constructor** for it.
|
||||
|
||||
cls is a SQLAlchemy table.
|
||||
db is a flask_sqlalchemy.SQLAlchemy() binding.
|
||||
|
||||
Usage:
|
||||
|
||||
auth_required = require_auth(User, db)
|
||||
|
||||
@route('/admin')
|
||||
@auth_required(validators=[lambda x: x.is_administrator])
|
||||
def super_secret_stuff(user):
|
||||
pass
|
||||
|
||||
NOTE: require_auth() DOES NOT work with flask_restx.
|
||||
"""
|
||||
def auth_required(**kwargs):
|
||||
return require_auth_base(cls=cls, src=FlaskAuthSrc(db), **kwargs)
|
||||
|
||||
auth_required.__doc__ = require_auth_base.__doc__
|
||||
|
||||
return auth_required
|
||||
|
||||
|
||||
__all__ = ('require_auth', )
|
||||
|
|
|
|||
|
|
@ -14,63 +14,47 @@ This software is distributed on an "AS IS" BASIS,
|
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from collections import namedtuple
|
||||
import math
|
||||
from threading import RLock
|
||||
import time
|
||||
from types import CoroutineType, NoneType
|
||||
from typing import Any, Callable, Iterable, Mapping, Never, TypeVar
|
||||
from typing import Callable
|
||||
import warnings
|
||||
from functools import update_wrapper, wraps, lru_cache
|
||||
|
||||
from suou.itertools import hashed_list
|
||||
|
||||
_T = TypeVar('_T')
|
||||
_U = TypeVar('_U')
|
||||
|
||||
|
||||
def _suou_deprecated(message: str, /, *, category=DeprecationWarning, stacklevel: int = 1) -> Callable[[Callable[_T, _U]], Callable[_T, _U]]:
|
||||
"""
|
||||
Backport of PEP 702 for Python <=3.12.
|
||||
The stack_level stuff is used by warnings.warn() btw
|
||||
"""
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
@wraps(func)
|
||||
def wrapper(*a, **ka):
|
||||
if category is not None:
|
||||
warnings.warn(message, category, stacklevel=stacklevel)
|
||||
return func(*a, **ka)
|
||||
func.__deprecated__ = True
|
||||
wrapper.__deprecated__ = True
|
||||
return wrapper
|
||||
return decorator
|
||||
from functools import wraps
|
||||
|
||||
try:
|
||||
from warnings import deprecated
|
||||
except ImportError:
|
||||
# Python <=3.12 does not implement warnings.deprecated
|
||||
deprecated = _suou_deprecated
|
||||
def deprecated(message: str, /, *, category=DeprecationWarning, stacklevel:int=1):
|
||||
"""
|
||||
Backport of PEP 702 for Python <=3.12.
|
||||
The stack_level stuff is not reimplemented on purpose because
|
||||
it is too obscure for the average programmer.
|
||||
"""
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
def wrapper(*a, **ka):
|
||||
if category is not None:
|
||||
warnings.warn(message, category, stacklevel=stacklevel)
|
||||
return func(*a, **ka)
|
||||
func.__deprecated__ = True
|
||||
wrapper.__deprecated__ = True
|
||||
return wrapper
|
||||
return decorator
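Usage matches PEP 702 / warnings.deprecated on Python 3.13+; a minimal sketch:

@deprecated('use new_fetch() instead')
def old_fetch(url: str):
    ...

old_fetch('https://example.com')   # emits a DeprecationWarning, then runs the body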
|
||||
|
||||
## this syntactic sugar for deprecated() is ... deprecated, which is ironic.
|
||||
## Needed move because VSCode seems to not sense deprecated_alias()es as deprecated.
|
||||
@deprecated('use deprecated(message)(func) instead')
|
||||
def deprecated_alias(func: Callable[_T, _U], /, message='use .{name}() instead', *, category=DeprecationWarning) -> Callable[_T, _U]:
|
||||
def deprecated_alias(func: Callable, /, message='use .{name}() instead', *, category=DeprecationWarning) -> Callable:
|
||||
"""
|
||||
Syntactic sugar helper for renaming functions.
|
||||
|
||||
DEPRECATED use deprecated(message)(func) instead
|
||||
"""
|
||||
@deprecated(message.format(name=func.__name__), category=category)
|
||||
@wraps(func)
|
||||
def deprecated_wrapper(*a, **k) -> _U:
|
||||
return func(*a, **k)
|
||||
return deprecated_wrapper
|
||||
return deprecated(message.format(name=func.__name__), category=category)(func)
|
||||
|
||||
def not_implemented(msg: Callable | str | None = None):
|
||||
"""
|
||||
A more elegant way to say a method is not implemented, but may get in the future.
|
||||
"""
|
||||
def decorator(func: Callable[_T, Any]) -> Callable[_T, Never]:
|
||||
def decorator(func: Callable) -> Callable:
|
||||
da_msg = msg if isinstance(msg, str) else 'method {name}() is not implemented'.format(name=func.__name__)
|
||||
@wraps(func)
|
||||
def wrapper(*a, **k):
|
||||
|
|
@ -80,265 +64,6 @@ def not_implemented(msg: Callable | str | None = None):
|
|||
return decorator(msg)
|
||||
return decorator
|
||||
|
||||
def future(message: str | None = None, *, version: str = None):
|
||||
"""
|
||||
Describes experimental or future APIs introduced as bug fixes (including as backports)
|
||||
but not yet intended for general use (mostly to keep semver consistent).
|
||||
|
||||
version= is the intended version release.
|
||||
|
||||
*New in 0.7.0*
|
||||
"""
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
@wraps(func)
|
||||
def wrapper(*a, **k) -> _U:
|
||||
warnings.warn(message or (
|
||||
f'{func.__name__}() is intended for release on {version} and not ready for use right now'
|
||||
if version else
|
||||
f'{func.__name__}() is intended for a future release and not ready for use right now'
|
||||
), FutureWarning)
|
||||
return func(*a, **k)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
def flat_args(args: Iterable, kwds: Mapping, typed,
|
||||
kwd_mark = (object(),),
|
||||
fasttypes = {int, str, frozenset, NoneType},
|
||||
sorted=sorted, tuple=tuple, type=type, len=len):
|
||||
'''Turn positional and keyword arguments (optionally with their types) into a hashable key for use in caches.
|
||||
|
||||
Shamelessly copied from functools._make_key() from the Python Standard Library.
|
||||
Never trust underscores, you know.
|
||||
|
||||
This assumes all argument types are hashable!'''
|
||||
key = args
|
||||
if kwds:
|
||||
sorted_items = sorted(kwds.items())
|
||||
key += kwd_mark
|
||||
for item in sorted_items:
|
||||
key += item
|
||||
if typed:
|
||||
key += tuple(type(v) for v in args)
|
||||
if kwds:
|
||||
key += tuple(type(v) for k, v in sorted_items)
|
||||
elif len(key) == 1 and type(key[0]) in fasttypes:
|
||||
return key[0]
|
||||
return hashed_list(key)
|
||||
|
||||
def _make_alru_cache(_CacheInfo):
|
||||
def alru_cache(maxsize: int = 128, typed: bool = False):
|
||||
"""
|
||||
Reimplementation of lru_cache(). In fact it's lru_cache() from Python==3.13.7 Standard
|
||||
Library with just three lines modified.
|
||||
|
||||
Shamelessly adapted from the Python Standard Library with modifications.
|
||||
|
||||
PSA there is no C speed up. Unlike PSL. Sorry.
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
|
||||
# Users should only access the lru_cache through its public API:
|
||||
# cache_info, cache_clear, and f.__wrapped__
|
||||
# The internals of the lru_cache are encapsulated for thread safety and
|
||||
# to allow the implementation to change (including a possible C version).
|
||||
# suou.alru_cache is based on pure-Python functools.lru_cache() as of Python 3.13.7.
|
||||
|
||||
if isinstance(maxsize, int):
|
||||
# Negative maxsize is treated as 0
|
||||
if maxsize < 0:
|
||||
maxsize = 0
|
||||
elif callable(maxsize) and isinstance(typed, bool):
|
||||
# The user_function was passed in directly via the maxsize argument
|
||||
user_function, maxsize = maxsize, 128
|
||||
wrapper = _alru_cache_wrapper(user_function, maxsize, typed)
|
||||
wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
|
||||
return update_wrapper(wrapper, user_function)
|
||||
elif maxsize is not None:
|
||||
raise TypeError(
|
||||
'Expected first argument to be an integer, a callable, or None')
|
||||
|
||||
def decorating_function(user_function: CoroutineType):
|
||||
wrapper = _alru_cache_wrapper(user_function, maxsize, typed)
|
||||
wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
|
||||
return update_wrapper(wrapper, user_function)
|
||||
|
||||
return decorating_function
|
||||
|
||||
def _alru_cache_wrapper(user_function, maxsize, typed):
|
||||
# Constants shared by all lru cache instances:
|
||||
sentinel = object() # unique object used to signal cache misses
|
||||
make_key = flat_args # build a key from the function arguments
|
||||
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
|
||||
|
||||
cache = {}
|
||||
hits = misses = 0
|
||||
full = False
|
||||
cache_get = cache.get # bound method to lookup a key or return None
|
||||
cache_len = cache.__len__ # get cache size without calling len()
|
||||
lock = RLock() # because linkedlist updates aren't threadsafe
|
||||
root = [] # root of the circular doubly linked list
|
||||
root[:] = [root, root, None, None] # initialize by pointing to self
|
||||
|
||||
if maxsize == 0:
|
||||
|
||||
async def wrapper(*args, **kwds):
|
||||
# No caching -- just a statistics update
|
||||
nonlocal misses
|
||||
misses += 1
|
||||
result = await user_function(*args, **kwds)
|
||||
return result
|
||||
|
||||
elif maxsize is None:
|
||||
|
||||
async def wrapper(*args, **kwds):
|
||||
# Simple caching without ordering or size limit
|
||||
nonlocal hits, misses
|
||||
key = make_key(args, kwds, typed)
|
||||
result = cache_get(key, sentinel)
|
||||
if result is not sentinel:
|
||||
hits += 1
|
||||
return result
|
||||
misses += 1
|
||||
result = await user_function(*args, **kwds)
|
||||
cache[key] = result
|
||||
return result
|
||||
|
||||
else:
|
||||
|
||||
async def wrapper(*args, **kwds):
|
||||
# Size limited caching that tracks accesses by recency
|
||||
nonlocal root, hits, misses, full
|
||||
key = make_key(args, kwds, typed)
|
||||
with lock:
|
||||
link = cache_get(key)
|
||||
if link is not None:
|
||||
# Move the link to the front of the circular queue
|
||||
link_prev, link_next, _key, result = link
|
||||
link_prev[NEXT] = link_next
|
||||
link_next[PREV] = link_prev
|
||||
last = root[PREV]
|
||||
last[NEXT] = root[PREV] = link
|
||||
link[PREV] = last
|
||||
link[NEXT] = root
|
||||
hits += 1
|
||||
return result
|
||||
misses += 1
|
||||
result = await user_function(*args, **kwds)
|
||||
with lock:
|
||||
if key in cache:
|
||||
# Getting here means that this same key was added to the
|
||||
# cache while the lock was released. Since the link
|
||||
# update is already done, we need only return the
|
||||
# computed result and update the count of misses.
|
||||
pass
|
||||
elif full:
|
||||
# Use the old root to store the new key and result.
|
||||
oldroot = root
|
||||
oldroot[KEY] = key
|
||||
oldroot[RESULT] = result
|
||||
# Empty the oldest link and make it the new root.
|
||||
# Keep a reference to the old key and old result to
|
||||
# prevent their ref counts from going to zero during the
|
||||
# update. That will prevent potentially arbitrary object
|
||||
# clean-up code (i.e. __del__) from running while we're
|
||||
# still adjusting the links.
|
||||
root = oldroot[NEXT]
|
||||
oldkey = root[KEY]
|
||||
oldresult = root[RESULT]
|
||||
root[KEY] = root[RESULT] = None
|
||||
# Now update the cache dictionary.
|
||||
del cache[oldkey]
|
||||
# Save the potentially reentrant cache[key] assignment
|
||||
# for last, after the root and links have been put in
|
||||
# a consistent state.
|
||||
cache[key] = oldroot
|
||||
else:
|
||||
# Put result in a new link at the front of the queue.
|
||||
last = root[PREV]
|
||||
link = [last, root, key, result]
|
||||
last[NEXT] = root[PREV] = cache[key] = link
|
||||
# Use the cache_len bound method instead of the len() function
|
||||
# which could potentially be wrapped in an lru_cache itself.
|
||||
full = (cache_len() >= maxsize)
|
||||
return result
|
||||
|
||||
def cache_info():
|
||||
"""Report cache statistics"""
|
||||
with lock:
|
||||
return _CacheInfo(hits, misses, maxsize, cache_len())
|
||||
|
||||
def cache_clear():
|
||||
"""Clear the cache and cache statistics"""
|
||||
nonlocal hits, misses, full
|
||||
with lock:
|
||||
cache.clear()
|
||||
root[:] = [root, root, None, None]
|
||||
hits = misses = 0
|
||||
full = False
|
||||
|
||||
wrapper.cache_info = cache_info
|
||||
wrapper.cache_clear = cache_clear
|
||||
return wrapper
|
||||
|
||||
return alru_cache
|
||||
|
||||
alru_cache = _make_alru_cache(namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]))
|
||||
del _make_alru_cache
|
||||
|
||||
def timed_cache(ttl: int, maxsize: int = 128, typed: bool = False, *, async_: bool = False) -> Callable[[Callable], Callable]:
|
||||
"""
|
||||
LRU cache which expires after the TTL in seconds passed as argument.
|
||||
|
||||
Supports coroutines with async_=True.
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
start_time = None
|
||||
|
||||
if async_:
|
||||
@alru_cache(maxsize, typed)
|
||||
async def inner_wrapper(ttl_period: int, /, *a, **k):
|
||||
return await func(*a, **k)
|
||||
|
||||
@wraps(func)
|
||||
async def wrapper(*a, **k):
|
||||
nonlocal start_time
|
||||
if not start_time:
|
||||
start_time = int(time.time())
|
||||
return await inner_wrapper(math.floor((time.time() - start_time) // ttl), *a, **k)
|
||||
|
||||
return wrapper
|
||||
else:
|
||||
@lru_cache(maxsize, typed)
|
||||
def inner_wrapper(ttl_period: int, /, *a, **k):
|
||||
return func(*a, **k)
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*a, **k):
|
||||
nonlocal start_time
|
||||
if not start_time:
|
||||
start_time = int(time.time())
|
||||
return inner_wrapper(math.floor((time.time() - start_time) // ttl), *a, **k)
|
||||
return wrapper
|
||||
return decorator
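A hedged usage sketch of the decorator defined above (arguments must be hashable, as with lru_cache):

@timed_cache(30)
def load_settings(path: str) -> str:
    with open(path) as f:      # re-read from disk at most once per 30 s per path
        return f.read()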
|
||||
|
||||
def none_pass(func: Callable[_T, _U], *args, **kwargs) -> Callable[_T, _U]:
|
||||
"""
|
||||
Wrap a callable so that it gets called only on non-None values.
|
||||
|
||||
Shorthand for func(x) if x is not None else None
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
@wraps(func)
|
||||
def wrapper(x):
|
||||
if x is None:
|
||||
return x
|
||||
return func(x, *args, **kwargs)
|
||||
return wrapper
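For example:

to_int = none_pass(int)
assert to_int('42') == 42
assert to_int(None) is None   # None passes through untouched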
|
||||
|
||||
__all__ = (
|
||||
'deprecated', 'not_implemented', 'timed_cache', 'none_pass', 'alru_cache'
|
||||
)
|
||||
'deprecated', 'not_implemented'
|
||||
)
|
||||
|
|
@ -1,69 +0,0 @@
|
|||
"""
|
||||
Helpers for "Glue" code, aka code meant to adapt or patch other libraries
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
import importlib
|
||||
from types import ModuleType
|
||||
|
||||
from functools import wraps
|
||||
from suou.classtools import MISSING
|
||||
from suou.functools import future
|
||||
|
||||
|
||||
@future()
|
||||
class FakeModule(ModuleType):
|
||||
"""
|
||||
Fake module used in @glue() in case of import error
|
||||
"""
|
||||
def __init__(self, name: str, exc: Exception):
|
||||
super().__init__(name)
|
||||
self._exc = exc
|
||||
def __getattr__(self, name: str):
|
||||
raise AttributeError(f'Module {self.__name__} not found; this feature is not available ({self._exc})') from self._exc
|
||||
|
||||
|
||||
@future()
|
||||
def glue(*modules):
|
||||
"""
|
||||
Helper for "glue" code -- it imports the given modules and passes them as keyword arguments to the wrapped functions.
|
||||
|
||||
EXPERIMENTAL
|
||||
"""
|
||||
module_dict = dict()
|
||||
imports_succeeded = True
|
||||
|
||||
for module in modules:
|
||||
try:
|
||||
module_dict[module] = importlib.import_module(module)
|
||||
except Exception as e:
|
||||
imports_succeeded = False
|
||||
module_dict[module] = FakeModule(module, e)
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*a, **k):
|
||||
try:
|
||||
result = func(*a, **k)
|
||||
except Exception:
|
||||
if not imports_succeeded:
|
||||
## XXX return an iterable? A Fake****?
|
||||
return MISSING
|
||||
raise
|
||||
return result
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
# This module is experimental and therefore not re-exported into __init__
|
||||
__all__ = ('glue', 'FakeModule')
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
"""
|
||||
Framework-agnostic utilities for web app development.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
import enum
|
||||
|
||||
class WantsContentType(enum.Enum):
|
||||
PLAIN = 'text/plain'
|
||||
JSON = 'application/json'
|
||||
HTML = 'text/html'
|
||||
|
||||
|
||||
|
||||
__all__ = ('WantsContentType',)
|
||||
|
|
@ -23,7 +23,6 @@ import os
|
|||
import toml
|
||||
from typing import Mapping
|
||||
|
||||
from .exceptions import BabelTowerError
|
||||
|
||||
class IdentityLang:
|
||||
'''
|
||||
|
|
@ -82,10 +81,7 @@ class I18n(metaclass=ABCMeta):
|
|||
def load_lang(self, name: str, filename: str | None = None) -> I18nLang:
|
||||
if not filename:
|
||||
filename = self.filename_tmpl.format(lang=name, ext=self.EXT)
|
||||
try:
|
||||
data = self.load_file(filename)
|
||||
except OSError as e:
|
||||
raise BabelTowerError(f'unknown language: {name}') from e
|
||||
data = self.load_file(filename)
|
||||
l = self.langs.setdefault(name, I18nLang())
|
||||
l.update(data[name] if name in data else data)
|
||||
if name != self.default_lang:
|
||||
|
|
|
|||
|
|
@ -31,7 +31,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
from __future__ import annotations
|
||||
import base64
|
||||
import binascii
|
||||
import datetime
|
||||
import enum
|
||||
from functools import cached_property
|
||||
import hashlib
|
||||
|
|
@ -41,8 +40,6 @@ import os
|
|||
from typing import Iterable, override
|
||||
import warnings
|
||||
|
||||
from suou.calendar import want_timestamp
|
||||
|
||||
from .functools import deprecated
|
||||
from .codecs import b32lencode, b64encode, cb32decode, cb32encode, want_str
|
||||
|
||||
|
|
@ -123,30 +120,20 @@ class SiqGen:
|
|||
"""
|
||||
Implement a SIS-compliant SIQ generator.
|
||||
"""
|
||||
__slots__ = ('domain_hash', 'last_gen_ts', 'counters', 'shard_id', '_test_cur_ts', '__weakref__')
|
||||
__slots__ = ('domain_hash', 'last_gen_ts', 'counters', 'shard_id', '__weakref__')
|
||||
|
||||
domain_hash: int
|
||||
last_gen_ts: int
|
||||
shard_id: int
|
||||
counters: dict[SiqType, int]
|
||||
_test_cur_ts: int | None
|
||||
|
||||
def __init__(self, domain: str, last_siq: int = 0, local_id: int | None = None, shard_id: int | None = None):
|
||||
self.domain_hash = make_domain_hash(domain, local_id)
|
||||
self._test_cur_ts = None ## test only
|
||||
self.last_gen_ts = min(last_siq >> 56, self.cur_timestamp())
|
||||
self.counters = dict()
|
||||
self.shard_id = (shard_id or os.getpid()) % 256
|
||||
def cur_timestamp(self) -> int:
|
||||
if self._test_cur_ts is not None:
|
||||
return self._test_cur_ts
|
||||
return int(time.time() * (1 << 16))
|
||||
def set_cur_timestamp(self, value: datetime.datetime):
|
||||
"""
|
||||
Intended to be used by tests only! Do not use in production!
|
||||
"""
|
||||
self._test_cur_ts = int(want_timestamp(value) * 2 ** 16)
|
||||
self.last_gen_ts = int(want_timestamp(value) * 2 ** 16)
|
||||
def generate(self, /, typ: SiqType, n: int = 1) -> Iterable[int]:
|
||||
"""
|
||||
Generate one or more SIQs.
|
||||
|
|
@ -165,7 +152,7 @@ class SiqGen:
|
|||
elif now > self.last_gen_ts:
|
||||
self.counters[typ] = 0
|
||||
while n:
|
||||
idseq = typ.prepend(self.counters.setdefault(typ, 0))
|
||||
idseq = typ.prepend(self.counters[typ])
|
||||
if idseq >= (1 << 16):
|
||||
while (now := self.cur_timestamp()) <= self.last_gen_ts:
|
||||
time.sleep(1 / (1 << 16))
|
||||
|
|
@ -238,7 +225,6 @@ class Siq(int):
|
|||
"""
|
||||
Representation of a SIQ as an integer.
|
||||
"""
|
||||
|
||||
def to_bytes(self, length: int = 14, byteorder = 'big', *, signed: bool = False) -> bytes:
|
||||
return super().to_bytes(length, byteorder, signed=signed)
|
||||
@classmethod
|
||||
|
|
@ -256,13 +242,11 @@ class Siq(int):
|
|||
@classmethod
|
||||
def from_cb32(cls, val: str | bytes):
|
||||
return cls.from_bytes(cb32decode(want_str(val).zfill(24)))
|
||||
|
||||
|
||||
def to_hex(self) -> str:
|
||||
return f'{self:x}'
|
||||
|
||||
def to_oct(self) -> str:
|
||||
return f'{self:o}'
|
||||
|
||||
def to_b32l(self) -> str:
|
||||
"""
|
||||
This is NOT the URI serializer!
|
||||
|
|
@ -312,10 +296,8 @@ class Siq(int):
|
|||
raise ValueError('checksum mismatch')
|
||||
return cls(int.from_bytes(b, 'big'))
|
||||
|
||||
|
||||
def to_mastodon(self, /, domain: str | None = None):
|
||||
return f'@{self:u}{"@" if domain else ""}{domain}'
|
||||
|
||||
def to_matrix(self, /, domain: str):
|
||||
return f'@{self:u}:{domain}'
|
||||
|
||||
|
|
|
|||
|
|
@ -14,30 +14,20 @@ This software is distributed on an "AS IS" BASIS,
|
|||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
'''
|
||||
|
||||
from functools import wraps
|
||||
from typing import Any, Callable, Iterable, MutableMapping, TypeVar
|
||||
from typing import Any, Iterable, MutableMapping, TypeVar
|
||||
import warnings
|
||||
|
||||
from suou.classtools import MISSING
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
def makelist(l: Any, wrap: bool = True) -> list | Callable[Any, list]:
|
||||
def makelist(l: Any) -> list:
|
||||
'''
|
||||
Make a list out of an iterable or a single value.
|
||||
|
||||
*Changed in 0.4.0*: Now accepts a callable, so it can be used to decorate generators and turn them into lists.
|
||||
Pass wrap=False to instead return the callable itself inside a list, without decorating it.
|
||||
|
||||
*Changed in 0.11.0*: the ``wrap`` argument is no longer keyword-only.
|
||||
'''
|
||||
if callable(l) and wrap:
|
||||
return wraps(l)(lambda *a, **k: makelist(l(*a, **k), wrap=False))
|
||||
if isinstance(l, (str, bytes, bytearray)):
|
||||
return [l]
|
||||
elif isinstance(l, Iterable):
|
||||
return list(l)
|
||||
elif l in (None, NotImplemented, Ellipsis, MISSING):
|
||||
elif l in (None, NotImplemented, Ellipsis):
|
||||
return []
|
||||
else:
|
||||
return [l]
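# Illustrative sketch, not part of the diff: the behaviours documented above,
# assuming the signature that accepts the wrap= argument.
assert makelist(3) == [3]                 # single value
assert makelist('abc') == ['abc']         # strings are kept whole
assert makelist(range(3)) == [0, 1, 2]    # iterables are expanded

@makelist
def _pair():
    yield 1
    yield 2

assert _pair() == [1, 2]                  # decorated generator now returns a list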
|
||||
|
|
@ -93,39 +83,6 @@ def additem(obj: MutableMapping, /, name: str = None):
|
|||
return func
|
||||
return decorator
|
||||
|
||||
def addattr(obj: Any, /, name: str = None):
|
||||
"""
|
||||
Same as additem(), but sets an attribute instead of an item.
|
||||
"""
|
||||
def decorator(func):
|
||||
key = name or func.__name__
|
||||
if hasattr(obj, key):
|
||||
warnings.warn(f'object already has attribute {key!r}')
|
||||
setattr(obj, key, func)
|
||||
return func
|
||||
return decorator
|
||||
|
||||
class hashed_list(list):
|
||||
"""
|
||||
Used by lru_cache() functions.
|
||||
|
||||
This class guarantees that hash() will be called no more than once
|
||||
per element. This is important because the lru_cache() will hash
|
||||
the key multiple times on a cache miss.
|
||||
|
||||
Shamelessly copied from functools._HashedSeq() in the Python Standard Library.
|
||||
Never trust underscores, you know.
|
||||
"""
|
||||
|
||||
__slots__ = 'hashvalue'
|
||||
|
||||
def __init__(self, tup, hash=hash):
|
||||
self[:] = tup
|
||||
self.hashvalue = hash(tup)
|
||||
|
||||
def __hash__(self):
|
||||
return self.hashvalue
|
||||
|
||||
|
||||
__all__ = ('makelist', 'kwargs_prefix', 'ltuple', 'rtuple', 'additem', 'addattr')
|
||||
__all__ = ('makelist', 'kwargs_prefix', 'ltuple', 'rtuple', 'additem')
|
||||
|
||||
|
|
|
|||
|
|
@ -1,98 +0,0 @@
|
|||
"""
|
||||
TOS / policy building blocks for the lazy, in the English language.
|
||||
|
||||
XXX DANGER! This is not a replacement for legal advice. Contact your lawyer.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
# TODO more snippets
|
||||
|
||||
from .strtools import SpitText
|
||||
|
||||
|
||||
INDEMNIFY = """
|
||||
You agree to indemnify and hold harmless {0} from any and all claims, damages, liabilities, costs and expenses, including reasonable and unreasonable counsel and attorney’s fees, arising out of any breach of this agreement.
|
||||
"""
|
||||
|
||||
NO_WARRANTY = """
|
||||
Except as represented in this agreement, the {0} is provided “AS IS”. Other than as provided in this agreement, {1} makes no other warranties, express or implied, and hereby disclaims all implied warranties, including any warranty of merchantability and warranty of fitness for a particular purpose.
|
||||
"""
|
||||
|
||||
GOVERNING_LAW = """
|
||||
These Terms of Service are governed by, and shall be interpreted in accordance with, the laws of {0}. You consent to the sole jurisdiction of {1} for all disputes between You and {2}, and You consent to the sole application of {3} law for all such disputes.
|
||||
"""
|
||||
|
||||
ENGLISH_FIRST = """
|
||||
In case there is any inconsistency between these Terms and any translation into other languages, the English language version takes precedence.
|
||||
"""
|
||||
|
||||
EXPECT_UPDATES = """
|
||||
{0} may periodically update these Terms of Service. Every time this happens, {0} will make its best efforts to notify You of such changes.
|
||||
|
||||
Whenever {0} updates these Terms of Service, Your continued use of the {0} platform constitutes Your agreement to the updated Terms of Service.
|
||||
"""
|
||||
|
||||
SEVERABILITY = """
|
||||
If one clause of these Terms of Service or any policy incorporated here by reference is determined by a court to be unenforceable, the remainder of the Terms and Content Policy shall remain in force.
|
||||
"""
|
||||
|
||||
COMPLETENESS = """
|
||||
These Terms, together with the other policies incorporated into them by reference, contain all the terms and conditions agreed upon by You and {0} regarding Your use of the {0} service. No other agreement, oral or otherwise, will be deemed to exist or to bind either of the parties to this Agreement.
|
||||
"""
|
||||
|
||||
|
||||
class Lawyer(SpitText):
|
||||
"""
|
||||
A tool to ease the writing of Terms of Service for web apps.
|
||||
|
||||
NOT A REPLACEMENT FOR A REAL LAWYER AND NOT LEGAL ADVICE
|
||||
|
||||
*New in 0.11.0*
|
||||
"""
|
||||
|
||||
def __init__(self, /,
|
||||
app_name: str, domain_name: str,
|
||||
company_name: str, jurisdiction: str,
|
||||
country: str, country_adjective: str
|
||||
):
|
||||
self.app_name = app_name
|
||||
self.domain_name = domain_name
|
||||
self.company_name = company_name
|
||||
self.jurisdiction = jurisdiction
|
||||
self.country = country
|
||||
self.country_adjective = country_adjective
|
||||
|
||||
def indemnify(self):
|
||||
return self.format(INDEMNIFY, 'app_name')
|
||||
|
||||
def no_warranty(self):
|
||||
return self.format(NO_WARRANTY, 'app_name', 'company_name')
|
||||
|
||||
def governing_law(self) -> str:
|
||||
return self.format(GOVERNING_LAW, 'country', 'jurisdiction', 'app_name', 'country_adjective')
|
||||
|
||||
def english_first(self) -> str:
|
||||
return ENGLISH_FIRST
|
||||
|
||||
def expect_updates(self) -> str:
|
||||
return self.format(EXPECT_UPDATES, 'app_name')
|
||||
|
||||
def severability(self) -> str:
|
||||
return SEVERABILITY
|
||||
|
||||
def completeness(self) -> str:
|
||||
return self.format(COMPLETENESS, 'app_name')
|
||||
|
||||
# This module is experimental and therefore not re-exported into __init__
|
||||
__all__ = ('Lawyer',)
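# Hedged usage sketch for Lawyer(); every value below is a placeholder, not
# taken from the diff, and the output is still not legal advice.
lawyer = Lawyer(
    app_name='ExampleApp', domain_name='example.org',
    company_name='Example LLC', jurisdiction='the courts of Exampleland',
    country='Exampleland', country_adjective='Examplelandic',
)
tos_tail = '\n\n'.join([
    lawyer.no_warranty(), lawyer.governing_law(),
    lawyer.severability(), lawyer.completeness(),
])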
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
"""
|
||||
Utilities for tokenization of text.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from re import Match
|
||||
|
||||
|
||||
from dataclasses import dataclass
|
||||
import re
|
||||
from typing import Any, Callable, Iterable
|
||||
|
||||
from .exceptions import InconsistencyError, LexError
|
||||
|
||||
from .itertools import makelist
|
||||
|
||||
|
||||
@dataclass
|
||||
class TokenSym:
|
||||
pattern: str
|
||||
label: str
|
||||
cast: Callable[[str], Any] | None = None
|
||||
discard: bool = False
|
||||
|
||||
# convenience methods below
|
||||
def match(self, s: str, index: int = 0) -> Match[str] | None:
|
||||
return re.compile(self.pattern, 0).match(s, index)
|
||||
|
||||
@makelist
|
||||
def symbol_table(*args: Iterable[tuple | TokenSym], whitespace: str | None = None):
|
||||
"""
|
||||
Make a symbol table from a list of tuples.
|
||||
|
||||
Tokens are in form (pattern, label[, cast]) where:
|
||||
- [] means optional
|
||||
- pattern is a regular expression (r-string syntax advised)
|
||||
- label is a constant string
|
||||
- cast is a function
|
||||
|
||||
Need to strip whitespace? Pass the whitespace= keyword parameter.
|
||||
"""
|
||||
for arg in args:
|
||||
if isinstance(arg, TokenSym):
|
||||
pass
|
||||
elif isinstance(arg, tuple):
|
||||
arg = TokenSym(*arg)
|
||||
else:
|
||||
raise TypeError(f'invalid type {arg.__class__.__name__!r}')
|
||||
yield arg
|
||||
if whitespace:
|
||||
yield TokenSym('[' + re.escape(whitespace) + ']+', '', discard=True)
|
||||
|
||||
|
||||
symbol_table: Callable[..., list]
|
||||
|
||||
def ilex(text: str, table: Iterable[TokenSym], *, whitespace = False):
|
||||
"""
|
||||
Return a text as a list of tokens, given a token table (iterable of TokenSym).
|
||||
|
||||
ilex() returns a generator; lex() returns a list.
|
||||
|
||||
table must be a result from symbol_table().
|
||||
"""
|
||||
i = 0
|
||||
while i < len(text):
|
||||
mo = None
|
||||
for sym in table:
|
||||
if mo := re.compile(sym.pattern).match(text, i):
|
||||
if not sym.discard:
|
||||
mtext = mo.group(0)
|
||||
if callable(sym.cast):
|
||||
mtext = sym.cast(mtext)
|
||||
yield (sym.label, mtext)
|
||||
elif whitespace:
|
||||
yield (None, mo.group(0))
|
||||
break
|
||||
if mo is None:
|
||||
raise LexError(f'illegal character near {text[i:i+5]!r}')
|
||||
if i == mo.end(0):
|
||||
raise InconsistencyError
|
||||
i = mo.end(0)
|
||||
|
||||
lex: Callable[..., list] = makelist(ilex)
|
||||
|
||||
__all__ = ('symbol_table', 'lex', 'ilex')
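# Hedged sketch of the lexer above: a tiny arithmetic token table.
table = symbol_table(
    (r'\d+', 'NUMBER', int),
    (r'[+*()-]', 'OP'),
    whitespace=' \t',
)
print(lex('12 + 7*3', table))
# expected: [('NUMBER', 12), ('OP', '+'), ('NUMBER', 7), ('OP', '*'), ('NUMBER', 3)]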
|
||||
116
src/suou/luck.py
116
src/suou/luck.py
|
|
@ -1,116 +0,0 @@
|
|||
"""
|
||||
Fortune, RNG and esoterism.
|
||||
|
||||
*New in 0.7.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from functools import wraps
|
||||
from typing import Callable, Generic, Iterable, TypeVar
|
||||
import random
|
||||
from suou.exceptions import BadLuckError
|
||||
|
||||
_T = TypeVar('_T')
|
||||
_U = TypeVar('_U')
|
||||
|
||||
|
||||
def lucky(validators: Iterable[Callable[[_U], bool]] = ()):
|
||||
"""
|
||||
Add one or more constraints on a function's return value.
|
||||
Each validator must return a boolean. If false, the result is considered
|
||||
unlucky and BadLuckError() is raised.
|
||||
|
||||
UNTESTED
|
||||
|
||||
*New in 0.7.0*
|
||||
"""
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs) -> _U:
|
||||
try:
|
||||
result = func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
raise BadLuckError(f'exception happened: {e}') from e
|
||||
for v in validators:
|
||||
try:
|
||||
if not v(result):
|
||||
message = 'result not expected'
|
||||
raise BadLuckError(f'{message}: {result!r}')
|
||||
except BadLuckError:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise BadLuckError(f'cannot validate: {e}') from e
|
||||
return result
|
||||
return wrapper
|
||||
return decorator
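# Hedged usage sketch for @lucky(); the module is marked UNTESTED, so this shows
# the intended behaviour only.
@lucky(validators=[lambda n: n > 2])
def roll_die():
    return random.randint(1, 6)

# roll_die() returns 3..6 normally and raises BadLuckError when the roll is 1 or 2.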
|
||||
|
||||
class RngCallable(Callable, Generic[_T, _U]):
|
||||
"""
|
||||
Overloaded ...randomly chosen callable.
|
||||
|
||||
UNTESTED
|
||||
|
||||
*New in 0.7.0*
|
||||
"""
|
||||
def __init__(self, /, func: Callable[_T, _U] | None = None, weight: int = 1):
|
||||
self._callables = []
|
||||
self._max_weight = 0
|
||||
if callable(func):
|
||||
self.add_callable(func, weight)
|
||||
def add_callable(self, func: Callable[_T, _U], weight: int = 1):
|
||||
"""
|
||||
"""
|
||||
weight = int(weight)
|
||||
if weight <= 0:
|
||||
return
|
||||
self._callables.append((func, weight))
|
||||
self._max_weight += weight
|
||||
def __call__(self, *a, **ka) -> _U:
|
||||
choice = random.randrange(self._max_weight)
|
||||
for c, w in self._callables:
|
||||
if choice < w:
|
||||
return c(*a, **ka)
|
||||
elif choice < 0:
|
||||
raise RuntimeError('inconsistent state')
|
||||
else:
|
||||
choice -= w
|
||||
|
||||
|
||||
def rng_overload(prev_func: RngCallable[..., _U] | int | None, /, *, weight: int = 1) -> RngCallable[..., _U]:
|
||||
"""
|
||||
Decorate the first function with @rng_overload and the weight= parameter
|
||||
(default 1, must be an integer) to create a "RNG" overloaded callable.
|
||||
|
||||
Each call randomly chooses one candidate (weights are taken into consideration),
|
||||
calls it, and returns the result.
|
||||
|
||||
UNTESTED
|
||||
|
||||
*New in 0.7.0*
|
||||
"""
|
||||
if isinstance(prev_func, int) and weight == 1:
|
||||
weight, prev_func = prev_func, None
|
||||
|
||||
def decorator(func: Callable[_T, _U]) -> RngCallable[_T, _U]:
|
||||
nonlocal prev_func
|
||||
if prev_func is None:
|
||||
prev_func = RngCallable(func, weight=weight)
|
||||
else:
|
||||
prev_func.add_callable(func, weight=weight)
|
||||
return prev_func
|
||||
return decorator
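# Hedged sketch of the intended @rng_overload() usage (module marked UNTESTED);
# both decorated names end up bound to the same RngCallable.
@rng_overload(weight=3)
def greet():
    return 'hello'

@rng_overload(greet)
def greet_alt():
    return 'ciao'

# greet() is now expected to return 'hello' about three times out of four.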
|
||||
|
||||
|
||||
# This module is experimental and therefore not re-exported into __init__
|
||||
__all__ = ('lucky', 'rng_overload')
|
||||
|
|
@ -43,9 +43,9 @@ class SpoilerExtension(markdown.extensions.Extension):
|
|||
"""
|
||||
Add spoiler tags to text, using >!Reddit syntax!<.
|
||||
|
||||
If blockquotes interfere with rendered markup, you might want to call
|
||||
SpoilerExtension.patch_blockquote_processor() to clear conflicts with
|
||||
the blockquote processor and allow spoiler tags to start at beginning of line.
|
||||
XXX remember to call SpoilerExtension.patch_blockquote_processor()
|
||||
to clear conflicts with the blockquote processor and allow
|
||||
spoiler tags to start at beginning of line.
|
||||
"""
|
||||
def extendMarkdown(self, md: markdown.Markdown, md_globals=None):
|
||||
md.inlinePatterns.register(SimpleTagInlineProcessor(r'()>!(.*?)!<', 'span class="spoiler"'), 'spoiler', 14)
|
||||
|
|
|
|||
143
src/suou/mat.py
143
src/suou/mat.py
|
|
@ -1,143 +0,0 @@
|
|||
"""
|
||||
Matrix (not the movie...)
|
||||
|
||||
*New in 0.12.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from typing import Collection, Iterable, TypeVar
|
||||
from .functools import deprecated
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
class Matrix(Collection[_T]):
|
||||
"""
|
||||
Minimalist reimplementation of matrices in pure Python.
|
||||
|
||||
This is to avoid adding numpy as a dependency.
|
||||
|
||||
*New in 0.12.0*
|
||||
"""
|
||||
_shape: tuple[int, int]
|
||||
_elements: list[_T]
|
||||
|
||||
def shape(self):
|
||||
return self._shape
|
||||
|
||||
def __init__(self, iterable: Iterable[_T] | Iterable[Collection[_T]], shape: tuple[int, int] | None = None):
|
||||
elements = []
|
||||
boundary_x = boundary_y = 0
|
||||
for row in iterable:
|
||||
if isinstance(row, Collection):
|
||||
if not boundary_y:
|
||||
boundary_y = len(row)
|
||||
elements.extend(row)
|
||||
boundary_x += 1
|
||||
elif boundary_y != len(row):
|
||||
raise ValueError('row length mismatch')
|
||||
else:
|
||||
elements.extend(row)
|
||||
boundary_x += 1
|
||||
elif shape:
|
||||
if not boundary_x:
|
||||
boundary_x, boundary_y = shape
|
||||
elements.append(row)
|
||||
self._shape = boundary_x, boundary_y
|
||||
self._elements = elements
|
||||
assert len(self._elements) == boundary_x * boundary_y
|
||||
|
||||
def __getitem__(self, key: tuple[int, int]) -> _T:
|
||||
(x, y), (_, sy) = key, self.shape()
|
||||
|
||||
return self._elements[x * sy + y]
|
||||
|
||||
@property
|
||||
def T(self):
|
||||
sx, sy = self.shape()
|
||||
return Matrix(
|
||||
[
|
||||
[
|
||||
self[j, i] for j in range(sx)
|
||||
] for i in range(sy)
|
||||
]
|
||||
)
|
||||
|
||||
def __matmul__(self, other: Matrix) -> Matrix:
|
||||
(ax, ay), (bx, by) = self.shape(), other.shape()
|
||||
|
||||
if ay != bx:
|
||||
raise ValueError('cannot multiply matrices with incompatible shape')
|
||||
|
||||
return Matrix([
|
||||
[
|
||||
sum(self[i, k] * other[k, j] for k in range(ay)) for j in range(by)
|
||||
] for i in range(ax)
|
||||
])
|
||||
|
||||
def __eq__(self, other: Matrix):
|
||||
try:
|
||||
return self._elements == other._elements and self._shape == other._shape
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def __len__(self):
|
||||
ax, ay = self.shape()
|
||||
return ax * ay
|
||||
|
||||
@deprecated('please use .rows() or .columns() instead')
|
||||
def __iter__(self):
|
||||
return iter(self._elements)
|
||||
|
||||
def __contains__(self, x: object, /) -> bool:
|
||||
return x in self._elements
|
||||
|
||||
def __repr__(self):
|
||||
return f'{self.__class__.__name__}({list(self.rows())})'
|
||||
|
||||
def rows(self):
|
||||
sx, sy = self.shape()
|
||||
return (
|
||||
[self[i, j] for j in range(sy)] for i in range(sx)
|
||||
)
|
||||
|
||||
def columns(self):
|
||||
sx, sy = self.shape()
|
||||
return (
|
||||
[self[j, i] for j in range(sx)] for i in range(sy)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def as_row(cls, iterable: Iterable):
|
||||
return cls([[*iterable]])
|
||||
|
||||
@classmethod
|
||||
def as_column(cls, iterable: Iterable):
|
||||
return cls([[x] for x in iterable])
|
||||
|
||||
def get_column(self, idx = 0):
|
||||
sx, _ = self.shape()
|
||||
return [
|
||||
self[j, idx] for j in range(sx)
|
||||
]
|
||||
|
||||
def get_row(self, idx = 0):
|
||||
_, sy = self.shape()
|
||||
return [
|
||||
self[idx, j] for j in range(sy)
|
||||
]
|
||||
|
||||
__all__ = ('Matrix', )
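# Quick sketch of Matrix() as defined above; shape() is (rows, columns).
a = Matrix([[1, 2], [3, 4]])
b = Matrix([[5, 6], [7, 8]])
assert (a @ b).shape() == (2, 2)
assert a @ b == Matrix([[19, 22], [43, 50]])
assert a.T == Matrix([[1, 3], [2, 4]])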
|
||||
|
||||
|
||||
|
|
@ -135,4 +135,4 @@ class UlidSiqMigrator(SiqMigrator):
|
|||
|
||||
__all__ = (
|
||||
'SnowflakeSiqMigrator', 'UlidSiqMigrator'
|
||||
)
|
||||
)
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
"""
|
||||
This stuff might still be good, but it's out of support.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
|
@ -18,11 +18,10 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
from contextvars import ContextVar
|
||||
from typing import Iterable
|
||||
from playhouse.shortcuts import ReconnectMixin
|
||||
from peewee import BigIntegerField, CharField, Database, Field, MySQLDatabase, _ConnectionState
|
||||
from peewee import CharField, Database, MySQLDatabase, _ConnectionState
|
||||
import re
|
||||
|
||||
from suou.iding import Siq
|
||||
from suou.snowflake import Snowflake
|
||||
|
||||
from .codecs import StringCase
|
||||
|
||||
|
|
@ -119,25 +118,5 @@ class SiqField(Field):
|
|||
return Siq.from_bytes(value)
|
||||
|
||||
|
||||
class SnowflakeField(BigIntegerField):
|
||||
'''
|
||||
Field holding a snowflake.
|
||||
|
||||
Stored as bigint.
|
||||
|
||||
XXX UNTESTED!
|
||||
'''
|
||||
field_type = 'bigint'
|
||||
|
||||
def db_value(self, value: int | Snowflake) -> int:
|
||||
if isinstance(value, Snowflake):
|
||||
value = int(value)
|
||||
if not isinstance(value, int):
|
||||
raise TypeError
|
||||
return value
|
||||
def python_value(self, value: int) -> Snowflake:
|
||||
return Snowflake(value)
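# Hedged usage sketch for SnowflakeField(); the model and database below are
# illustrative assumptions, not part of the diff.
from peewee import Model, SqliteDatabase
_db = SqliteDatabase(':memory:')

class Post(Model):
    id = SnowflakeField(primary_key=True)
    class Meta:
        database = _db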
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
__all__ = ('connect_reconnect', 'RegexCharField', 'SiqField', 'Snowflake')
|
||||
__all__ = ('connect_reconnect', 'RegexCharField', 'SiqField')
|
||||
|
||||
|
|
|
|||
|
|
@ -1,82 +0,0 @@
|
|||
"""
|
||||
Utilities for Quart, asynchronous successor of Flask
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import current_app, Quart, request, g
|
||||
from quart_schema import QuartSchema
|
||||
|
||||
from suou.http import WantsContentType
|
||||
|
||||
from .i18n import I18n
|
||||
from .itertools import makelist
|
||||
|
||||
def add_i18n(app: Quart, i18n: I18n, var_name: str = 'T', *,
|
||||
query_arg: str = 'lang', default_lang = 'en'):
|
||||
'''
|
||||
Integrate an I18n() object with a Quart application:
|
||||
- set g.lang
|
||||
- add T() to Jinja templates
|
||||
|
||||
XXX UNTESTED
|
||||
'''
|
||||
def _get_lang():
|
||||
lang = request.args.get(query_arg)
|
||||
if not lang:
|
||||
for lp in request.headers.get('accept-language', 'en').split(','):
|
||||
l = lp.split(';')[0]
|
||||
lang = l
|
||||
break
|
||||
else:
|
||||
lang = default_lang
|
||||
return lang
|
||||
|
||||
@app.context_processor
|
||||
def _add_i18n():
|
||||
return {var_name: i18n.lang(_get_lang()).t}
|
||||
|
||||
@app.before_request
|
||||
def _add_language_code():
|
||||
g.lang = _get_lang()
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def negotiate() -> WantsContentType:
|
||||
"""
|
||||
Return an appropriate MIME type for the sake of content negotiation.
|
||||
"""
|
||||
if any(request.path.startswith(f'/{p.strip('/')}/') for p in current_app.config.get('REST_PATHS', [])):
|
||||
return WantsContentType.JSON
|
||||
elif request.user_agent.string.startswith('Mozilla/'):
|
||||
return WantsContentType.HTML
|
||||
else:
|
||||
return request.accept_mimetypes.best_match([WantsContentType.PLAIN, WantsContentType.JSON, WantsContentType.HTML])
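# Hedged usage sketch (app, route and template names are assumptions): branch a
# view on the negotiated content type.
from quart import render_template
app = Quart(__name__)

@app.route('/thing')
async def thing():
    if negotiate() == WantsContentType.JSON:
        return {'ok': True}
    return await render_template('thing.html')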
|
||||
|
||||
|
||||
def add_rest(app: Quart, *bases: str, **kwargs) -> QuartSchema:
|
||||
"""
|
||||
Construct a REST ...
|
||||
|
||||
The rest of ...
|
||||
"""
|
||||
|
||||
schema = QuartSchema(app, **kwargs)
|
||||
app.config['REST_PATHS'] = makelist(bases, wrap=False)
|
||||
return schema
|
||||
|
||||
|
||||
__all__ = ('add_i18n', 'negotiate', 'add_rest')
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
"""
|
||||
"Security through obscurity" helpers for less sensitive logging
|
||||
|
||||
*New in 0.5.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
|
||||
def redact_url_password(u: str) -> str:
|
||||
"""
|
||||
Remove password from URIs.
|
||||
|
||||
The password part in URIs is:
|
||||
scheme://username:password@hostname/path?query
|
||||
^------^
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
return re.sub(r':[^@:/ ]+@', ':***@', u)
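# Example of the redaction above (sketch, not part of the diff):
print(redact_url_password('mysql://root:hunter2@db.local/app'))
# -> mysql://root:***@db.local/app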
|
||||
|
||||
|
||||
__all__ = ('redact_url_password', )
|
||||
152
src/suou/sass.py
152
src/suou/sass.py
|
|
@ -1,152 +0,0 @@
|
|||
"""
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
from typing import Callable, Mapping
|
||||
from sass import CompileError
|
||||
from sassutils.builder import Manifest
|
||||
from importlib.metadata import version as _get_version
|
||||
|
||||
from .codecs import quote_css_string, want_bytes, want_bytes
|
||||
from .validators import must_be
|
||||
from .asgi import _MiddlewareFactory, ASGIApp, ASGIReceive, ASGIScope, ASGISend
|
||||
from . import __version__ as _suou_version
|
||||
|
||||
from pkg_resources import resource_filename
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
## NOTE Python/PSF recommends use of importlib.metadata for version checks.
|
||||
_libsass_version = _get_version('libsass')
|
||||
|
||||
class SassAsyncMiddleware(_MiddlewareFactory):
|
||||
"""
|
||||
ASGI middleware for development purposes.
|
||||
Every time a CSS file is requested, it finds the matching
|
||||
Sass/SCSS source file and then compiles it into CSS.
|
||||
|
||||
Eventual syntax errors are displayed in three ways:
|
||||
- a leading CSS comment (e.g. `/* Error: invalid pro*/`)
|
||||
- **red text** in `body::before` (in most cases very evident, since every other
|
||||
style fails to render!)
|
||||
- server-side logging (level is *error*, remember to enable logging!)
|
||||
|
||||
app = ASGI application to wrap
|
||||
manifests = a Mapping of build settings, see sass_manifests= option
|
||||
in `setup.py`
|
||||
|
||||
Shamelessly adapted from libsass==0.23.0 with modifications
|
||||
|
||||
XXX experimental and untested!
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, app: ASGIApp, manifests: Mapping, package_dir = {},
|
||||
error_status = '200 OK'
|
||||
):
|
||||
self.app = must_be(app, Callable, 'app must be an ASGI-compliant callable')
|
||||
self.manifests = Manifest.normalize_manifests(manifests)
|
||||
self.package_dir = dict(must_be(package_dir, Mapping, 'package_dir must be a mapping'))
|
||||
## ???
|
||||
self.error_status = error_status
|
||||
for package_name in self.manifests:
|
||||
if package_name in self.package_dir:
|
||||
continue
|
||||
self.package_dir[package_name] = resource_filename(package_name, '')
|
||||
self.paths: list[tuple[str, str, Manifest]] = []
|
||||
for pkgname, manifest in self.manifests.items():
|
||||
## WSGI path — is it valid for ASGI as well??
|
||||
asgi_path = f'/{manifest.wsgi_path.strip('/')}/'
|
||||
pkg_dir = self.package_dir[pkgname]
|
||||
self.paths.append((asgi_path, pkg_dir, manifest))
|
||||
|
||||
async def __call__(self, /, scope: ASGIScope, receive: ASGIReceive, send: ASGISend):
|
||||
path: str = scope.get('path')
|
||||
if path.endswith('.css'):
|
||||
for prefix, package_dir, manifest in self.paths:
|
||||
if not path.startswith(prefix):
|
||||
continue
|
||||
css_filename = path[len(prefix):]
|
||||
sass_filename = manifest.unresolve_filename(package_dir, css_filename)
|
||||
try:
|
||||
## TODO consider async??
|
||||
result = manifest.build_one(
|
||||
package_dir,
|
||||
sass_filename,
|
||||
source_map=True
|
||||
)
|
||||
except OSError:
|
||||
break
|
||||
except CompileError as e:
|
||||
logger.error(str(e))
|
||||
resp_body = '\n'.join([
|
||||
'/*',
|
||||
str(e),
|
||||
'***',
|
||||
f'libsass {_libsass_version} + suou {_suou_version} {datetime.datetime.now().isoformat()}',
|
||||
'*/',
|
||||
'',
|
||||
'body::before {',
|
||||
f' content: {quote_css_string(str(e))};',
|
||||
' color: maroon;',
|
||||
' background-color: white;',
|
||||
' white-space: pre-wrap;',
|
||||
' display: block;',
|
||||
' font-family: monospace;',
|
||||
' user-select: text;'
|
||||
'}'
|
||||
]).encode('utf-8')
|
||||
|
||||
await send({
|
||||
'type': 'http.response.start',
|
||||
'status': self.error_status,
|
||||
'headers': [
|
||||
(b'Content-Type', b'text/css; charset=utf-8'),
|
||||
(b'Content-Length', want_bytes(f'{len(resp_body)}'))
|
||||
]
|
||||
})
|
||||
await send({
|
||||
'type': 'http.response.body',
|
||||
'body': resp_body
|
||||
})
|
||||
|
||||
return
|
||||
|
||||
async def _read_file(path):
|
||||
with open(path, 'rb') as f:
|
||||
while True:
|
||||
chunk = f.read(4096)
|
||||
if chunk:
|
||||
yield chunk
|
||||
else:
|
||||
break
|
||||
|
||||
file_path = os.path.join(package_dir, result)
|
||||
|
||||
await send({
|
||||
'type': 'http.response.start',
|
||||
'status': 200,
|
||||
'headers': [
|
||||
(b'Content-Type', b'text/css; charset=utf-8'),
|
||||
(b'Content-Length', want_bytes(f'{os.path.getsize(file_path)}'))
|
||||
]
|
||||
})
|
||||
|
||||
resp_body = b''
|
||||
async for chunk in _read_file(file_path):
|
||||
resp_body += chunk
|
||||
|
||||
await send({
|
||||
'type': 'http.response.body',
|
||||
'body': resp_body
|
||||
})
|
||||
|
||||
return
|
||||
|
||||
await self.app(scope, receive, send)
|
||||
|
||||
|
||||
|
|
@ -15,45 +15,30 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
"""
|
||||
|
||||
from abc import ABC
|
||||
from base64 import b64decode
|
||||
from typing import Any, Callable, Sequence
|
||||
import warnings
|
||||
from itsdangerous import TimestampSigner
|
||||
|
||||
from itsdangerous import Signer as _Signer
|
||||
from itsdangerous.encoding import int_to_bytes as _int_to_bytes
|
||||
|
||||
from suou.dei import dei_args
|
||||
from suou.itertools import rtuple
|
||||
|
||||
from .functools import not_implemented
|
||||
from .codecs import jsondecode, jsonencode, rb64decode, want_bytes, want_str, b64decode, b64encode
|
||||
from .codecs import jsondecode, jsonencode, want_bytes, want_str
|
||||
from .iding import Siq
|
||||
from .classtools import MISSING
|
||||
|
||||
class UserSigner(TimestampSigner):
|
||||
"""
|
||||
itsdangerous.TimestampSigner() instanced from a user ID, with token generation and validation capabilities.
|
||||
"""
|
||||
user_id: int
|
||||
@dei_args(primary_secret='master_secret')
|
||||
def __init__(self, master_secret: bytes, user_id: int, user_secret: bytes, **kwargs):
|
||||
super().__init__(master_secret + user_secret, salt=Siq(user_id).to_bytes(), **kwargs)
|
||||
self.user_id = user_id
|
||||
def token(self, *, test_timestamp=MISSING) -> str:
|
||||
payload = Siq(self.user_id).to_base64()
|
||||
## The following is not intended for general use
|
||||
if test_timestamp is not MISSING:
|
||||
warnings.warn('timestamp= parameter is intended for testing only!\n\x1b[31mDO NOT use it in production or you might get consequences\x1b[0m, just saying', UserWarning)
|
||||
ts_payload = b64encode(_int_to_bytes(test_timestamp))
|
||||
payload = want_bytes(payload) + want_bytes(self.sep) + want_bytes(ts_payload)
|
||||
return want_str(_Signer.sign(self, payload))
|
||||
## END the following is not intended for general use
|
||||
|
||||
return want_str(self.sign(payload))
|
||||
def token(self) -> str:
|
||||
return self.sign(Siq(self.user_id).to_base64()).decode('ascii')
|
||||
@classmethod
|
||||
def split_token(cls, /, token: str | bytes) :
|
||||
a, b, c = want_str(token).rsplit('.', 2)
|
||||
return b64decode(a), int.from_bytes(b64decode(b), 'big'), b64decode(c)
|
||||
return b64decode(a), b, b64decode(c)
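# Hedged sketch of UserSigner as shown above; the secrets and id are fabricated,
# and the middle element of split_token() differs between the two versions shown.
signer = UserSigner(b'master-secret', 12345, b'user-secret')
tok = signer.token()                     # '<payload>.<timestamp>.<signature>'
user_id_part, ts_part, sig_part = UserSigner.split_token(tok)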
|
||||
def sign_object(self, obj: dict, /, *, encoder=jsonencode, **kwargs):
|
||||
"""
|
||||
Return a signed JSON payload of an object.
|
||||
|
|
@ -69,6 +54,7 @@ class UserSigner(TimestampSigner):
|
|||
def split_signed(self, payload: str | bytes) -> Sequence[bytes]:
|
||||
return rtuple(want_bytes(payload).rsplit(b'.', 2), 3, b'')
|
||||
|
||||
|
||||
class HasSigner(ABC):
|
||||
'''
|
||||
Abstract base class for INTERNAL USE.
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
|
||||
|
||||
from __future__ import annotations
|
||||
from binascii import unhexlify
|
||||
import os
|
||||
from threading import Lock
|
||||
import time
|
||||
|
|
@ -29,7 +28,7 @@ import warnings
|
|||
|
||||
from .migrate import SnowflakeSiqMigrator
|
||||
from .iding import SiqType
|
||||
from .codecs import b32ldecode, b32lencode, b64encode, b64decode, cb32encode, cb32decode
|
||||
from .codecs import b32ldecode, b32lencode, b64encode, cb32encode
|
||||
from .functools import deprecated
|
||||
|
||||
|
||||
|
|
@ -122,46 +121,27 @@ class Snowflake(int):
|
|||
|
||||
def to_bytes(self, length: int = 14, byteorder = "big", *, signed: bool = False) -> bytes:
|
||||
return super().to_bytes(length, byteorder, signed=signed)
|
||||
@classmethod
|
||||
def from_bytes(cls, b: bytes, byteorder = 'big', *, signed: bool = False) -> Snowflake:
|
||||
if len(b) not in (8, 10):
|
||||
warnings.warn('Snowflakes are exactly 8 bytes long', BytesWarning)
|
||||
return super().from_bytes(b, byteorder, signed=signed)
|
||||
|
||||
def to_base64(self, length: int = 9, *, strip: bool = True) -> str:
|
||||
return b64encode(self.to_bytes(length), strip=strip)
|
||||
@classmethod
|
||||
def from_base64(cls, val:str) -> Snowflake:
|
||||
return Snowflake.from_bytes(b64decode(val))
|
||||
|
||||
def to_cb32(self) -> str:
|
||||
return cb32encode(self.to_bytes(8, 'big'))
|
||||
to_crockford = to_cb32
|
||||
@classmethod
|
||||
def from_cb32(cls, val:str) -> Snowflake:
|
||||
return Snowflake.from_bytes(cb32decode(val))
|
||||
|
||||
def to_hex(self) -> str:
|
||||
return f'{self:x}'
|
||||
@classmethod
|
||||
def from_hex(cls, val:str) -> Snowflake:
|
||||
if val.startswith('_'):
|
||||
return -cls.from_hex(val.lstrip('_'))
|
||||
return Snowflake.from_bytes(unhexlify(val))
|
||||
|
||||
def to_oct(self) -> str:
|
||||
return f'{self:o}'
|
||||
@classmethod
|
||||
def from_oct(cls, val:str) -> Snowflake:
|
||||
if val.startswith('_'):
|
||||
return -cls.from_hex(val.lstrip('_'))
|
||||
return Snowflake(int(val, base=8))
|
||||
|
||||
def to_b32l(self) -> str:
|
||||
# PSA Snowflake Base32 representations are padded to 10 bytes!
|
||||
if self < 0:
|
||||
return '_' + Snowflake.to_b32l(-self)
|
||||
return b32lencode(self.to_bytes(10, 'big')).lstrip('a')
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, b: bytes, byteorder = 'big', *, signed: bool = False) -> Snowflake:
|
||||
if len(b) not in (8, 10):
|
||||
warnings.warn('Snowflakes are exactly 8 bytes long', BytesWarning)
|
||||
return super().from_bytes(b, byteorder, signed=signed)
|
||||
|
||||
@classmethod
|
||||
def from_b32l(cls, val: str) -> Snowflake:
|
||||
if val.startswith('_'):
|
||||
|
|
@ -169,14 +149,6 @@ class Snowflake(int):
|
|||
return -cls.from_b32l(val.lstrip('_'))
|
||||
return Snowflake.from_bytes(b32ldecode(val.rjust(16, 'a')))
|
||||
|
||||
def to_siq(self, domain: str, epoch: int, target_type: SiqType, **kwargs):
|
||||
"""
|
||||
Convenience method for conversion to SIQ.
|
||||
|
||||
(!) This does not check for existence! Always do the check yourself.
|
||||
"""
|
||||
return SnowflakeSiqMigrator(domain, epoch, **kwargs).to_siq(self, target_type)
|
||||
|
||||
@override
|
||||
def __format__(self, opt: str, /) -> str:
|
||||
try:
|
||||
|
|
@ -207,6 +179,15 @@ class Snowflake(int):
|
|||
def __repr__(self):
|
||||
return f'{self.__class__.__name__}({super().__repr__()})'
|
||||
|
||||
def to_siq(self, domain: str, epoch: int, target_type: SiqType, **kwargs):
|
||||
"""
|
||||
Convenience method for conversion to SIQ.
|
||||
|
||||
(!) This does not check for existence! Always do the check yourself.
|
||||
"""
|
||||
return SnowflakeSiqMigrator(domain, epoch, **kwargs).to_siq(self, target_type)
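# Hedged sketch of the conversion helpers above; the exact textual forms depend
# on suou.codecs and are illustrative only.
sf = Snowflake(175928847299117063)
print(sf.to_b32l())                              # compact lowercase base32
print(Snowflake.from_b32l(sf.to_b32l()) == sf)   # expected: True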
|
||||
|
||||
|
||||
|
||||
__all__ = (
|
||||
'Snowflake', 'SnowflakeGen'
|
||||
|
|
|
|||
302
src/suou/sqlalchemy.py
Normal file
302
src/suou/sqlalchemy.py
Normal file
|
|
@ -0,0 +1,302 @@
|
|||
"""
|
||||
Utilities for SQLAlchemy
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from functools import wraps
|
||||
from typing import Callable, Iterable, Never, TypeVar
|
||||
import warnings
|
||||
from sqlalchemy import BigInteger, CheckConstraint, Date, Dialect, ForeignKey, LargeBinary, Column, MetaData, SmallInteger, String, create_engine, select, text
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, declarative_base as _declarative_base, relationship
|
||||
|
||||
from .snowflake import SnowflakeGen
|
||||
from .itertools import kwargs_prefix, makelist
|
||||
from .signing import HasSigner, UserSigner
|
||||
from .codecs import StringCase
|
||||
from .functools import deprecated, not_implemented
|
||||
from .iding import Siq, SiqGen, SiqType, SiqCache
|
||||
from .classtools import Incomplete, Wanted
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
# SIQs are 14 bytes long. Storage is padded for alignment
|
||||
# Not to be confused with SiqType.
|
||||
IdType = LargeBinary(16)
|
||||
|
||||
@not_implemented
|
||||
def sql_escape(s: str, /, dialect: Dialect) -> str:
|
||||
"""
|
||||
Escape a value for SQL embedding, using SQLAlchemy's literal processors.
|
||||
Requires a dialect argument.
|
||||
|
||||
XXX this function is not mature yet, do not use
|
||||
"""
|
||||
if isinstance(s, str):
|
||||
return String().literal_processor(dialect=dialect)(s)
|
||||
raise TypeError('invalid data type')
|
||||
|
||||
|
||||
def create_session(url: str) -> Session:
|
||||
"""
|
||||
Create a session on the fly, given a database URL. Useful for
|
||||
contextless environments, such as Python REPL.
|
||||
|
||||
Heads up: a function with the same name exists in core sqlalchemy, but behaves
|
||||
completely differently!!
|
||||
"""
|
||||
engine = create_engine(url)
|
||||
return Session(bind = engine)
|
||||
|
||||
def id_column(typ: SiqType, *, primary_key: bool = True, **kwargs):
|
||||
"""
|
||||
Marks a column which contains a SIQ.
|
||||
"""
|
||||
def new_id_factory(owner: DeclarativeBase) -> Callable:
|
||||
domain_name = owner.metadata.info['domain_name']
|
||||
idgen = SiqCache(SiqGen(domain_name), typ)
|
||||
def new_id() -> bytes:
|
||||
return Siq(idgen.generate()).to_bytes()
|
||||
return new_id
|
||||
if primary_key:
|
||||
return Incomplete(Column, IdType, primary_key = True, default = Wanted(new_id_factory), **kwargs)
|
||||
else:
|
||||
return Incomplete(Column, IdType, unique = True, nullable = False, default = Wanted(new_id_factory), **kwargs)
|
||||
|
||||
def snowflake_column(*, primary_key: bool = True, **kwargs):
|
||||
"""
|
||||
Same as id_column() but with snowflakes.
|
||||
|
||||
XXX this is meant ONLY as a means of transition; for new stuff, use id_column() and SIQ.
|
||||
"""
|
||||
def new_id_factory(owner: DeclarativeBase) -> Callable:
|
||||
epoch = owner.metadata.info['snowflake_epoch']
|
||||
# more arguments will be passed on (?)
|
||||
idgen = SnowflakeGen(epoch)
|
||||
def new_id() -> int:
|
||||
return idgen.generate_one()
|
||||
return new_id
|
||||
if primary_key:
|
||||
return Incomplete(Column, BigInteger, primary_key = True, default = Wanted(new_id_factory), **kwargs)
|
||||
else:
|
||||
return Incomplete(Column, BigInteger, unique = True, nullable = False, default = Wanted(new_id_factory), **kwargs)
|
||||
|
||||
|
||||
def match_constraint(col_name: str, regex: str, /, dialect: str = 'default', constraint_name: str | None = None) -> CheckConstraint:
|
||||
"""
|
||||
Shorthand for a check constraint. Several dialects are supported.
|
||||
"""
|
||||
return CheckConstraint(text(match_constraint.TEXT_DIALECTS.get(dialect, match_constraint.TEXT_DIALECTS['default'])).bindparams(n=col_name, re=regex),
|
||||
name=constraint_name)
|
||||
|
||||
match_constraint.TEXT_DIALECTS = {
|
||||
'default': ':n ~ :re',
|
||||
'postgresql': ':n ~ :re',
|
||||
'mariadb': ':n RLIKE :re'
|
||||
}
|
||||
|
||||
def match_column(length: int, regex: str, /, case: StringCase = StringCase.AS_IS, *args, constraint_name: str | None = None, **kwargs):
|
||||
"""
|
||||
Syntactic sugar to create a String() column with a check constraint matching the given regular expression.
|
||||
|
||||
TODO application side validation
|
||||
"""
|
||||
if case != StringCase.AS_IS: # TODO
|
||||
warnings.warn('case arg is currently not working', FutureWarning)
|
||||
return Incomplete(Column, String(length), Wanted(lambda x, n: match_constraint(n, regex, #dialect=x.metadata.engine.dialect.name,
|
||||
constraint_name=constraint_name or f'{x.__tablename__}_{n}_valid')), *args, **kwargs)
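# Sketch of match_column() inside a model body; the pattern, length and
# constraint name are illustrative assumptions.
username = match_column(30, r'^[a-z0-9_]+$', constraint_name='user_username_valid', nullable=False)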
|
||||
|
||||
|
||||
def declarative_base(domain_name: str, master_secret: bytes, metadata: dict | None = None, **kwargs):
|
||||
"""
|
||||
Drop-in replacement for sqlalchemy.orm.declarative_base()
|
||||
taking into account the requirements for SIQ generation (i.e. the domain name).
|
||||
"""
|
||||
if not isinstance(metadata, dict):
|
||||
metadata = dict()
|
||||
if 'info' not in metadata:
|
||||
metadata['info'] = dict()
|
||||
# snowflake metadata
|
||||
snowflake_kwargs = kwargs_prefix(kwargs, 'snowflake_', remove=True, keep_prefix=True)
|
||||
metadata['info'].update(
|
||||
domain_name = domain_name,
|
||||
secret_key = master_secret,
|
||||
**snowflake_kwargs
|
||||
)
|
||||
Base = _declarative_base(metadata=MetaData(**metadata), **kwargs)
|
||||
return Base
|
||||
entity_base = deprecated('use declarative_base() instead')(declarative_base)
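# Hedged usage sketch for declarative_base() and id_column(); the domain, secret
# and SiqType member below are illustrative assumptions.
Base = declarative_base('example.org', b'not-a-real-secret')

class User(Base):
    __tablename__ = 'user'
    id = id_column(SiqType.USER)
    name = Column(String(64))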
|
||||
|
||||
|
||||
def token_signer(id_attr: Column | str, secret_attr: Column | str) -> Incomplete[UserSigner]:
|
||||
"""
|
||||
Generate a user signing function.
|
||||
|
||||
Requires a master secret (taken from Base.metadata), a user id (visible in the token)
|
||||
and a user secret.
|
||||
"""
|
||||
if isinstance(id_attr, Column):
|
||||
id_val = id_attr
|
||||
elif isinstance(id_attr, str):
|
||||
id_val = Wanted(id_attr)
|
||||
if isinstance(secret_attr, Column):
|
||||
secret_val = secret_attr
|
||||
elif isinstance(secret_attr, str):
|
||||
secret_val = Wanted(secret_attr)
|
||||
def token_signer_factory(owner: DeclarativeBase, name: str):
|
||||
def my_signer(self):
|
||||
return UserSigner(owner.metadata.info['secret_key'], id_val.__get__(self, owner), secret_val.__get__(self, owner))
|
||||
my_signer.__name__ = name
|
||||
return my_signer
|
||||
return Incomplete(Wanted(token_signer_factory))
|
||||
|
||||
|
||||
def author_pair(fk_name: str, *, id_type: type = IdType, sig_type: type | None = None, nullable: bool = False, sig_length: int | None = 2048, **ka) -> tuple[Column, Column]:
|
||||
"""
|
||||
Return an owner ID/signature column pair, for authenticated values.
|
||||
"""
|
||||
id_ka = kwargs_prefix(ka, 'id_')
|
||||
sig_ka = kwargs_prefix(ka, 'sig_')
|
||||
id_col = Column(id_type, ForeignKey(fk_name), nullable = nullable, **id_ka)
|
||||
sig_col = Column(sig_type or LargeBinary(sig_length), nullable = nullable, **sig_ka)
|
||||
return (id_col, sig_col)
|
||||
|
||||
|
||||
def age_pair(*, nullable: bool = False, **ka) -> tuple[Column, Column]:
|
||||
"""
|
||||
Return a SIS-compliant age representation, i.e. a date and accuracy pair.
|
||||
|
||||
Accuracy is represented by a small integer:
|
||||
0 = exact
|
||||
1 = month and day
|
||||
2 = year and month
|
||||
3 = year
|
||||
4 = estimated year
|
||||
"""
|
||||
date_ka = kwargs_prefix(ka, 'date_')
|
||||
acc_ka = kwargs_prefix(ka, 'acc_')
|
||||
date_col = Column(Date, nullable = nullable, **date_ka)
|
||||
acc_col = Column(SmallInteger, nullable = nullable, **acc_ka)
|
||||
return (date_col, acc_col)
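# Sketch of how the column pairs above are unpacked inside a model body
# (attribute and table names are assumptions):
author_id, author_sig = author_pair('user.id')
birth_date, birth_date_acc = age_pair(nullable=True)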
|
||||
|
||||
|
||||
def parent_children(keyword: str, /, **kwargs):
|
||||
"""
|
||||
Self-referential one-to-many relationship pair.
|
||||
Parent comes first, children come later.
|
||||
|
||||
keyword is used in back_populates column names: convention over
|
||||
configuration. Naming it otherwise will BREAK your models.
|
||||
|
||||
Additional keyword arguments can be sourced with parent_ and child_ argument prefixes,
|
||||
obviously.
|
||||
"""
|
||||
|
||||
parent_kwargs = kwargs_prefix(kwargs, 'parent_')
|
||||
child_kwargs = kwargs_prefix(kwargs, 'child_')
|
||||
|
||||
parent = Incomplete(relationship, Wanted(lambda o, n: o.__name__), back_populates=f'child_{keyword}s', **parent_kwargs)
|
||||
child = Incomplete(relationship, Wanted(lambda o, n: o.__name__), back_populates=f'parent_{keyword}', **child_kwargs)
|
||||
|
||||
return parent, child
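# Sketch of parent_children(), reusing the Base sketched earlier; attribute names
# must follow the back_populates convention described above
# ('parent_<keyword>' and 'child_<keyword>s').
class Comment(Base):
    __tablename__ = 'comment'
    id = id_column(SiqType.COMMENT)
    parent_id = Column(IdType, ForeignKey('comment.id'), nullable=True)
    parent_comment, child_comments = parent_children('comment')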
|
||||
|
||||
def want_column(cls: type[DeclarativeBase], col: Column[_T] | str) -> Column[_T]:
|
||||
"""
|
||||
Return a table's column given its name.
|
||||
|
||||
XXX does it belong outside any scopes?
|
||||
"""
|
||||
if isinstance(col, Incomplete):
|
||||
raise TypeError('attempt to pass an uninstanced column. Pass the column name as a string instead.')
|
||||
elif isinstance(col, Column):
|
||||
return col
|
||||
elif isinstance(col, str):
|
||||
return getattr(cls, col)
|
||||
else:
|
||||
raise TypeError
|
||||
|
||||
|
||||
class AuthSrc(metaclass=ABCMeta):
|
||||
'''
|
||||
AuthSrc object required for require_auth_base().
|
||||
|
||||
This is an abstract class and is NOT usable directly.
|
||||
'''
|
||||
def required_exc(self) -> Never:
|
||||
raise ValueError('required field missing')
|
||||
def invalid_exc(self, msg: str = 'validation failed') -> Never:
|
||||
raise ValueError(msg)
|
||||
@abstractmethod
|
||||
def get_session(self) -> Session:
|
||||
pass
|
||||
def get_user(self, getter: Callable):
|
||||
return getter(self.get_token())
|
||||
@abstractmethod
|
||||
def get_token(self):
|
||||
pass
|
||||
@abstractmethod
|
||||
def get_signature(self):
|
||||
pass
|
||||
|
||||
|
||||
def require_auth_base(cls: type[DeclarativeBase], *, src: AuthSrc, column: str | Column[_T] = 'id', dest: str = 'user',
|
||||
required: bool = False, signed: bool = False, sig_dest: str = 'signature', validators: Callable | Iterable[Callable] | None = None):
|
||||
'''
|
||||
Inject the current user into a view, given the Authorization: Bearer header.
|
||||
|
||||
For portability reasons, this is a partial, two-component function, requiring an AuthSrc() object.
|
||||
'''
|
||||
col = want_column(cls, column)
|
||||
validators = makelist(validators)
|
||||
|
||||
def get_user(token) -> DeclarativeBase:
|
||||
if token is None:
|
||||
return None
|
||||
tok_parts = UserSigner.split_token(token)
|
||||
user: HasSigner = src.get_session().execute(select(cls).where(col == tok_parts[0])).scalar()
|
||||
try:
|
||||
signer: UserSigner = user.signer()
|
||||
signer.unsign(token)
|
||||
return user
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _default_invalid(msg: str = 'validation failed'):
|
||||
raise ValueError(msg)
|
||||
|
||||
invalid_exc = src.invalid_exc or _default_invalid
|
||||
required_exc = src.required_exc or (lambda: _default_invalid('Login required'))
|
||||
|
||||
def decorator(func: Callable):
|
||||
@wraps(func)
|
||||
def wrapper(*a, **ka):
|
||||
ka[dest] = get_user(src.get_token())
|
||||
if not ka[dest] and required:
|
||||
required_exc()
|
||||
if signed:
|
||||
ka[sig_dest] = src.get_signature()
|
||||
for valid in validators:
|
||||
if not valid(ka[dest]):
|
||||
invalid_exc(getattr(valid, 'message', 'validation failed').format(user=ka[dest]))
|
||||
return func(*a, **ka)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
__all__ = (
|
||||
'IdType', 'id_column', 'entity_base', 'declarative_base', 'token_signer', 'match_column', 'match_constraint',
|
||||
'author_pair', 'age_pair', 'require_auth_base', 'want_column'
|
||||
)
|
||||
|
|
@ -1,175 +0,0 @@
|
|||
"""
|
||||
Utilities for SQLAlchemy.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from functools import wraps
|
||||
from typing import Any, Callable, Iterable, Never, TypeVar
|
||||
import warnings
|
||||
from sqlalchemy import BigInteger, Boolean, CheckConstraint, Date, Dialect, ForeignKey, LargeBinary, Column, MetaData, SmallInteger, String, create_engine, select, text
|
||||
from sqlalchemy.orm import DeclarativeBase, InstrumentedAttribute, Relationship, Session, declarative_base as _declarative_base, relationship
|
||||
from sqlalchemy.types import TypeEngine
|
||||
|
||||
from ..snowflake import SnowflakeGen
|
||||
from ..itertools import kwargs_prefix, makelist
|
||||
from ..signing import HasSigner, UserSigner
|
||||
from ..codecs import StringCase
|
||||
from ..functools import deprecated, not_implemented
|
||||
from ..iding import Siq, SiqGen, SiqType, SiqCache
|
||||
from ..classtools import Incomplete, Wanted
|
||||
|
||||
|
||||
_T = TypeVar('_T')
|
||||
_U = TypeVar('_U')
|
||||
|
||||
IdType: TypeEngine = LargeBinary(16)
|
||||
"""
|
||||
Database type for SIQ.
|
||||
|
||||
SIQs are 14 bytes long. Storage is padded for alignment
|
||||
Not to be confused with SiqType.
|
||||
"""
|
||||
|
||||
def create_session(url: str) -> Session:
|
||||
"""
|
||||
Create a session on the fly, given a database URL. Useful for
|
||||
contextless environments, such as Python REPL.
|
||||
|
||||
Heads up: a function with the same name exists in core sqlalchemy, but behaves
|
||||
completely differently!!
|
||||
"""
|
||||
engine = create_engine(url)
|
||||
return Session(bind = engine)
|
||||
|
||||
|
||||
def token_signer(id_attr: Column | str, secret_attr: Column | str) -> Incomplete[UserSigner]:
|
||||
"""
|
||||
Generate a user signing function.
|
||||
|
||||
Requires a master secret (taken from Base.metadata), a user id (visible in the token)
|
||||
and a user secret.
|
||||
"""
|
||||
id_val: Column | Wanted[Column]
|
||||
if isinstance(id_attr, Column):
|
||||
id_val = id_attr
|
||||
elif isinstance(id_attr, str):
|
||||
id_val = Wanted(id_attr)
|
||||
if isinstance(secret_attr, Column):
|
||||
secret_val = secret_attr
|
||||
elif isinstance(secret_attr, str):
|
||||
secret_val = Wanted(secret_attr)
|
||||
def token_signer_factory(owner: DeclarativeBase, name: str):
|
||||
def my_signer(self):
|
||||
return UserSigner(
|
||||
owner.metadata.info['secret_key'],
|
||||
id_val.__get__(self, owner), secret_val.__get__(self, owner) # pyright: ignore[reportAttributeAccessIssue]
|
||||
)
|
||||
my_signer.__name__ = name
|
||||
return my_signer
|
||||
return Incomplete(Wanted(token_signer_factory))
|
||||
|
||||
|
||||
## (in)Utilities for use in web apps below
|
||||
|
||||
@deprecated('not part of the public API and not even working. Will be removed in 0.14.0')
|
||||
class AuthSrc(metaclass=ABCMeta):
|
||||
'''
|
||||
AuthSrc object required for require_auth_base().
|
||||
|
||||
This is an abstract class and is NOT usable directly.
|
||||
|
||||
This is not part of the public API
|
||||
|
||||
DEPRECATED
|
||||
'''
|
||||
def required_exc(self) -> Never:
|
||||
raise ValueError('required field missing')
|
||||
def invalid_exc(self, msg: str = 'validation failed') -> Never:
|
||||
raise ValueError(msg)
|
||||
@abstractmethod
|
||||
def get_session(self) -> Session:
|
||||
pass
|
||||
def get_user(self, getter: Callable):
|
||||
return getter(self.get_token())
|
||||
@abstractmethod
|
||||
def get_token(self):
|
||||
pass
|
||||
@abstractmethod
|
||||
def get_signature(self):
|
||||
pass
|
||||
|
||||
|
||||
@deprecated('not working and too complex to use. Will be removed in 0.14.0')
|
||||
def require_auth_base(cls: type[DeclarativeBase], *, src: AuthSrc, column: str | Column[_T] = 'id', dest: str = 'user',
|
||||
required: bool = False, signed: bool = False, sig_dest: str = 'signature', validators: Callable | Iterable[Callable] | None = None):
|
||||
'''
|
||||
Inject the current user into a view, given the Authorization: Bearer header.
|
||||
|
||||
For portability reasons, this is a partial, two-component function, requiring an AuthSrc() object.
|
||||
'''
|
||||
col = want_column(cls, column)
|
||||
validators = makelist(validators)
|
||||
|
||||
def get_user(token) -> DeclarativeBase:
|
||||
if token is None:
|
||||
return None
|
||||
tok_parts = UserSigner.split_token(token)
|
||||
user: HasSigner = src.get_session().execute(select(cls).where(col == tok_parts[0])).scalar()
|
||||
try:
|
||||
signer: UserSigner = user.signer()
|
||||
signer.unsign(token)
|
||||
return user
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _default_invalid(msg: str = 'Validation failed'):
|
||||
raise ValueError(msg)
|
||||
|
||||
invalid_exc = src.invalid_exc or _default_invalid
|
||||
required_exc = src.required_exc or (lambda: _default_invalid('Login required'))
|
||||
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
@wraps(func)
|
||||
def wrapper(*a, **ka):
|
||||
ka[dest] = get_user(src.get_token())
|
||||
if not ka[dest] and required:
|
||||
required_exc()
|
||||
if signed:
|
||||
ka[sig_dest] = src.get_signature()
|
||||
for valid in validators:
|
||||
if not valid(ka[dest]):
|
||||
invalid_exc(getattr(valid, 'message', 'validation failed').format(user=ka[dest]))
|
||||
return func(*a, **ka)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
from .asyncio import SQLAlchemy, AsyncSelectPagination, async_query
|
||||
from .orm import (
|
||||
id_column, snowflake_column, match_column, match_constraint, bool_column, declarative_base, parent_children,
|
||||
author_pair, age_pair, bound_fk, unbound_fk, want_column, a_relationship, BitSelector, secret_column, username_column
|
||||
)
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
__all__ = (
|
||||
'IdType', 'id_column', 'snowflake_column', 'entity_base', 'declarative_base', 'token_signer',
|
||||
'match_column', 'match_constraint', 'bool_column', 'parent_children',
|
||||
'author_pair', 'age_pair', 'bound_fk', 'unbound_fk', 'want_column',
|
||||
'a_relationship', 'BitSelector', 'secret_column', 'username_column',
|
||||
# .asyncio
|
||||
'SQLAlchemy', 'AsyncSelectPagination', 'async_query', 'SessionWrapper'
|
||||
)
|
||||
|
|
@ -1,261 +0,0 @@
|
|||
|
||||
"""
|
||||
Helpers for asynchronous use of SQLAlchemy.
|
||||
|
||||
*New in 0.5.0; moved to current location in 0.6.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from functools import wraps
|
||||
|
||||
from contextvars import ContextVar, Token
|
||||
from typing import Callable, TypeVar
|
||||
from sqlalchemy import Select, Table, func, select
|
||||
from sqlalchemy.orm import DeclarativeBase, lazyload
|
||||
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
|
||||
from flask_sqlalchemy.pagination import Pagination
|
||||
|
||||
from suou.exceptions import NotFoundError
|
||||
from suou.glue import glue
|
||||
|
||||
_T = TypeVar('_T')
|
||||
_U = TypeVar('_U')
|
||||
|
||||
class SQLAlchemy:
|
||||
"""
|
||||
Almost drop-in replacement for flask_sqlalchemy.SQLAlchemy(),
suitable for async environments.
|
||||
|
||||
Notable changes:
|
||||
+ You have to create the session yourself. Easiest use case:
|
||||
|
||||
async def handler (userid):
|
||||
async with db as session:
|
||||
# do something
|
||||
user = (await session.execute(select(User).where(User.id == userid))).scalar()
|
||||
# ...
|
||||
|
||||
*New in 0.5.0*
|
||||
|
||||
*Changed in 0.6.0*: added wrap=True
|
||||
|
||||
*Changed in 0.6.1*: expire_on_commit is now configurable per-SQLAlchemy();
|
||||
now sessions are stored as context variables
|
||||
|
||||
*Changed in 0.11.0*: sessions are now wrapped by default; turn it off by instantiating it with wrap=False
|
||||
"""
|
||||
base: DeclarativeBase
|
||||
engine: AsyncEngine
|
||||
_session_tok: list[Token[AsyncSession]]
|
||||
_wrapsessions: bool | None
|
||||
_xocommit: bool | None
|
||||
NotFound = NotFoundError
|
||||
|
||||
def __init__(self, model_class: DeclarativeBase, *, expire_on_commit = False, wrap = True):
|
||||
self.base = model_class
|
||||
self.engine = None
|
||||
self._wrapsessions = wrap
|
||||
self._xocommit = expire_on_commit
|
||||
def bind(self, url: str):
|
||||
self.engine = create_async_engine(url)
|
||||
def _ensure_engine(self):
|
||||
if self.engine is None:
|
||||
raise RuntimeError('database is not connected')
|
||||
async def begin(self, *, expire_on_commit = None, wrap = None, **kw) -> AsyncSession:
|
||||
self._ensure_engine()
|
||||
## XXX is it accurate?
|
||||
s = AsyncSession(self.engine,
|
||||
expire_on_commit=expire_on_commit if expire_on_commit is not None else self._xocommit,
|
||||
**kw)
|
||||
if (wrap if wrap is not None else self._wrapsessions):
|
||||
s = SessionWrapper(s)
|
||||
current_session.set(s)
|
||||
return s
|
||||
async def __aenter__(self) -> AsyncSession:
|
||||
return await self.begin()
|
||||
async def __aexit__(self, e1, e2, e3):
|
||||
## XXX is it accurate?
|
||||
s = current_session.get()
|
||||
if not s:
|
||||
raise RuntimeError('session not closed')
|
||||
if e1:
|
||||
await s.rollback()
|
||||
else:
|
||||
await s.commit()
|
||||
await s.close()
|
||||
async def paginate(self, select: Select, *,
|
||||
page: int | None = None, per_page: int | None = None,
|
||||
max_per_page: int | None = None, error_out: bool = True,
|
||||
count: bool = True) -> AsyncSelectPagination:
|
||||
"""
|
||||
Return a pagination. Analogous to flask_sqlalchemy.SQLAlchemy.paginate().
|
||||
"""
|
||||
async with self as session:
|
||||
return AsyncSelectPagination(
|
||||
select = select,
|
||||
session = session,
|
||||
page = page,
|
||||
per_page=per_page, max_per_page=max_per_page,
|
||||
error_out=self.NotFound if error_out else None, count=count
|
||||
)
|
||||
async def create_all(self, *, checkfirst = True):
|
||||
"""
|
||||
Initialize database
|
||||
"""
|
||||
self._ensure_engine()
async with self.engine.begin() as conn:
    await conn.run_sync(self.base.metadata.create_all, checkfirst=checkfirst)
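For orientation, here is a minimal usage sketch of the class above. The `Base` declarative base, the `User` model and the aiosqlite URL are hypothetical, not part of this module.

```python
import asyncio
from sqlalchemy import select

db = SQLAlchemy(Base)                      # Base: your declarative base (assumed to exist)
db.bind('sqlite+aiosqlite:///:memory:')    # any async-capable SQLAlchemy URL

async def main():
    await db.create_all()
    async with db as session:              # opens (and, by default, wraps) an AsyncSession
        users = (await session.execute(select(User))).scalars()
        print(list(users))

asyncio.run(main())
```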
|
||||
|
||||
# XXX NOT public API! DO NOT USE
|
||||
current_session: ContextVar[AsyncSession] = ContextVar('current_session')
|
||||
|
||||
|
||||
|
||||
|
||||
class AsyncSelectPagination(Pagination):
|
||||
"""
|
||||
flask_sqlalchemy.SelectPagination but asynchronous.
|
||||
|
||||
Pagination is not part of the public API, therefore expect that it may break
|
||||
"""
|
||||
|
||||
async def _query_items(self) -> list:
|
||||
select_q: Select = self._query_args["select"]
|
||||
select = select_q.limit(self.per_page).offset(self._query_offset)
|
||||
session: AsyncSession = self._query_args["session"]
|
||||
out = list((await session.execute(select)).scalars())
|
||||
return out
|
||||
|
||||
async def _query_count(self) -> int:
|
||||
select_q: Select = self._query_args["select"]
|
||||
sub = select_q.options(lazyload("*")).order_by(None).subquery()
|
||||
session: AsyncSession = self._query_args["session"]
|
||||
out = (await session.execute(select(func.count()).select_from(sub))).scalar()
|
||||
return out
|
||||
|
||||
def __init__(self,
|
||||
page: int | None = None,
|
||||
per_page: int | None = None,
|
||||
max_per_page: int | None = 100,
|
||||
error_out: Exception | None = NotFoundError,
|
||||
count: bool = True,
|
||||
**kwargs):
|
||||
## XXX flask-sqlalchemy says Pagination() is not public API.
|
||||
## Things may break; beware.
|
||||
self._query_args = kwargs
|
||||
page, per_page = self._prepare_page_args(
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
max_per_page=max_per_page,
|
||||
error_out=error_out,
|
||||
)
|
||||
|
||||
self.page: int = page
|
||||
"""The current page."""
|
||||
|
||||
self.per_page: int = per_page
|
||||
"""The maximum number of items on a page."""
|
||||
|
||||
self.max_per_page: int | None = max_per_page
|
||||
"""The maximum allowed value for ``per_page``."""
|
||||
|
||||
self.items = None
|
||||
self.total = None
|
||||
self.error_out = error_out
|
||||
self.has_count = count
|
||||
|
||||
async def __aiter__(self):
|
||||
self.items = await self._query_items()
|
||||
if self.items is None:
|
||||
raise RuntimeError('query returned None')
|
||||
if not self.items and self.page != 1 and self.error_out:
|
||||
raise self.error_out
|
||||
if self.has_count:
|
||||
self.total = await self._query_count()
|
||||
for i in self.items:
|
||||
yield i
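A hedged sketch of how this pagination might be consumed; `db` and `User` are hypothetical, and `error_out=None` keeps the sketch independent of Flask's request handling.

```python
# Build the pagination inside an open session and consume it with `async for`;
# the count query only runs when count=True (the default).
async def user_page(page: int = 1) -> list:
    async with db as session:
        pagination = AsyncSelectPagination(
            select=select(User), session=session,
            page=page, per_page=20, error_out=None,
        )
        return [user async for user in pagination]
```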
|
||||
|
||||
|
||||
|
||||
def async_query(db: SQLAlchemy, multi: bool = False):
|
||||
"""
|
||||
Wraps a query returning function into an executor coroutine.
|
||||
|
||||
The query function remains available as the .q or .query attribute.
|
||||
"""
|
||||
def decorator(func: Callable[_T, _U]) -> Callable[_T, _U]:
|
||||
@wraps(func)
|
||||
async def executor(*args, **kwargs):
|
||||
async with db as session:
|
||||
result = await session.execute(func(*args, **kwargs))
|
||||
return result.scalars() if multi else result.scalar()
|
||||
executor.query = executor.q = func
|
||||
return executor
|
||||
return decorator
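A small, hypothetical example of the decorator above: the wrapped function only builds a statement, and awaiting the wrapper executes it in a fresh session. `db` and `User` are assumptions.

```python
@async_query(db, multi=True)               # db: a bound SQLAlchemy() instance (assumed)
def recent_users(limit: int = 10):
    return select(User).order_by(User.id.desc()).limit(limit)

# users = await recent_users(5)            # executes the query, returns scalars
# stmt  = recent_users.q(5)                # the plain Select is still reachable
```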
|
||||
|
||||
class SessionWrapper:
|
||||
"""
|
||||
Wrap a SQLAlchemy() session (context manager), adding several QoL utilities.
|
||||
|
||||
It can be applied to:
|
||||
+ sessions created by SQLAlchemy() - wrapped by default since 0.11.0 (pass wrap=False to opt out);
|
||||
+ sessions created manually - by constructing SessionWrapper(session).
|
||||
|
||||
This works in async context; DO NOT USE with regular SQLAlchemy.
|
||||
|
||||
*New in 0.6.0*
|
||||
"""
|
||||
|
||||
def __init__(self, db_or_session: SQLAlchemy | AsyncSession):
|
||||
self._wrapped = db_or_session
|
||||
async def __aenter__(self):
|
||||
if isinstance(self._wrapped, SQLAlchemy):
|
||||
self._wrapped = await self._wrapped.begin()
|
||||
return self
|
||||
|
||||
async def __aexit__(self, *exc_info):
|
||||
await self._wrapped.__aexit__(*exc_info)
|
||||
|
||||
@property
|
||||
def _session(self):
|
||||
if isinstance(self._wrapped, AsyncSession):
|
||||
return self._wrapped
|
||||
raise RuntimeError('active session is required')
|
||||
|
||||
async def get_one(self, query: Select):
|
||||
result = await self._session.execute(query)
|
||||
return result.scalar()
|
||||
|
||||
async def get_by_id(self, table: Table, key):
|
||||
return await self.get_one(select(table).where(table.id == key)) # pyright: ignore[reportAttributeAccessIssue]
|
||||
|
||||
async def get_list(self, query: Select, limit: int | None = None):
|
||||
if limit:
|
||||
query = query.limit(limit)
|
||||
result = await self._session.execute(query)
|
||||
return list(result.scalars())
|
||||
|
||||
def __getattr__(self, key):
|
||||
"""
|
||||
Fall back to the wrapped session
|
||||
"""
|
||||
return getattr(self._session, key)
|
||||
|
||||
def __del__(self):
|
||||
self._session.close()
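Two hypothetical usage patterns for the wrapper, matching the two bullet points in the docstring; `db` and `User` are assumptions.

```python
# 1. With wrap=True (the default since 0.11.0), sessions opened through
#    SQLAlchemy() already arrive wrapped:
async def load_user(user_id):
    async with db as s:                    # s is a SessionWrapper here
        return await s.get_by_id(User, user_id)

# 2. A manually created AsyncSession can be wrapped explicitly:
async def newest_users(session):
    return await SessionWrapper(session).get_list(
        select(User).order_by(User.id.desc()), limit=20)
```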
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
__all__ = ('SQLAlchemy', 'AsyncSelectPagination', 'async_query', 'SessionWrapper')
|
||||
|
|
@ -1,344 +0,0 @@
|
|||
"""
|
||||
Utilities for SQLAlchemy; ORM
|
||||
|
||||
*New in 0.6.0 (moved)*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
from typing import Any, Callable, TypeVar
|
||||
import warnings
|
||||
from sqlalchemy import BigInteger, Boolean, CheckConstraint, Column, Date, ForeignKey, LargeBinary, MetaData, SmallInteger, String, text
|
||||
from sqlalchemy.orm import DeclarativeBase, InstrumentedAttribute, Relationship, declarative_base as _declarative_base, relationship
|
||||
from sqlalchemy.types import TypeEngine
|
||||
from sqlalchemy.ext.hybrid import Comparator
|
||||
from suou.functools import future
|
||||
from suou.classtools import Wanted, Incomplete
|
||||
from suou.codecs import StringCase
|
||||
from suou.dei import dei_args
|
||||
from suou.iding import Siq, SiqCache, SiqGen, SiqType
|
||||
from suou.itertools import kwargs_prefix
|
||||
from suou.snowflake import SnowflakeGen
|
||||
from suou.sqlalchemy import IdType
|
||||
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
|
||||
def want_column(cls: type[DeclarativeBase], col: Column[_T] | str) -> Column[_T]:
|
||||
"""
|
||||
Return a table's column given its name.
|
||||
|
||||
XXX does it belong outside any scopes?
|
||||
"""
|
||||
if isinstance(col, Incomplete):
|
||||
raise TypeError('attempt to pass an uninstanced column. Pass the column name as a string instead.')
|
||||
elif isinstance(col, Column):
|
||||
return col
|
||||
elif isinstance(col, str):
|
||||
return getattr(cls, col)
|
||||
else:
|
||||
raise TypeError
|
||||
|
||||
|
||||
def id_column(typ: SiqType, *, primary_key: bool = True, **kwargs):
|
||||
"""
|
||||
Marks a column which contains a SIQ.
|
||||
"""
|
||||
def new_id_factory(owner: DeclarativeBase) -> Callable:
|
||||
domain_name = owner.metadata.info['domain_name']
|
||||
idgen = SiqCache(SiqGen(domain_name), typ)
|
||||
def new_id() -> bytes:
|
||||
return Siq(idgen.generate()).to_bytes()
|
||||
return new_id
|
||||
if primary_key:
|
||||
return Incomplete(Column, IdType, primary_key = True, default = Wanted(new_id_factory), **kwargs)
|
||||
else:
|
||||
return Incomplete(Column, IdType, unique = True, nullable = False, default = Wanted(new_id_factory), **kwargs)
|
||||
|
||||
def snowflake_column(*, primary_key: bool = True, **kwargs):
|
||||
"""
|
||||
Same as id_column() but with snowflakes.
|
||||
|
||||
XXX this is meant ONLY as a means of transition; for new code, use id_column() and SIQs.
|
||||
"""
|
||||
def new_id_factory(owner: DeclarativeBase) -> Callable:
|
||||
epoch = owner.metadata.info['snowflake_epoch']
|
||||
# more arguments will be passed on (?)
|
||||
idgen = SnowflakeGen(epoch)
|
||||
def new_id() -> int:
|
||||
return idgen.generate_one()
|
||||
return new_id
|
||||
if primary_key:
|
||||
return Incomplete(Column, BigInteger, primary_key = True, default = Wanted(new_id_factory), **kwargs)
|
||||
else:
|
||||
return Incomplete(Column, BigInteger, unique = True, nullable = False, default = Wanted(new_id_factory), **kwargs)
|
||||
|
||||
|
||||
def match_constraint(col_name: str, regex: str, /, dialect: str = 'default', constraint_name: str | None = None) -> CheckConstraint:
|
||||
"""
|
||||
Shorthand for a check constraint. Several dialects are supported.
|
||||
"""
|
||||
return CheckConstraint(text(match_constraint.TEXT_DIALECTS.get(dialect, match_constraint.TEXT_DIALECTS['default'])).bindparams(n=col_name, re=regex),
|
||||
name=constraint_name)
|
||||
|
||||
match_constraint.TEXT_DIALECTS = {
|
||||
'default': ':n ~ :re',
|
||||
'postgresql': ':n ~ :re',
|
||||
'mariadb': ':n RLIKE :re'
|
||||
}
|
||||
|
||||
def match_column(length: int, regex: str | re.Pattern, /, case: StringCase = StringCase.AS_IS, *args, constraint_name: str | None = None, **kwargs) -> Incomplete[Column[str]]:
|
||||
"""
|
||||
Syntactic sugar to create a String() column with a check constraint matching the given regular expression.
|
||||
|
||||
TODO application side validation
|
||||
"""
|
||||
if case != StringCase.AS_IS: # TODO
|
||||
warnings.warn('case arg is currently not working', FutureWarning)
|
||||
return Incomplete(Column, String(length), Wanted(lambda x, n: match_constraint(n, regex, #dialect=x.metadata.engine.dialect.name,
|
||||
constraint_name=constraint_name or f'{x.__tablename__}_{n}_valid')), *args, **kwargs)
|
||||
|
||||
|
||||
def username_column(
|
||||
length: int = 32, regex: str | re.Pattern = '[a-z_][a-z0-9_-]+', *args, case: StringCase = StringCase.LOWER,
|
||||
nullable : bool = False, **kwargs) -> Incomplete[Column[str] | Column[str | None]]:
|
||||
"""
|
||||
Construct a column containing a unique handle / username.
|
||||
|
||||
Username must match the given `regex` and be at most `length` characters long.
|
||||
|
||||
*New in 0.8.0*
|
||||
"""
|
||||
if case is StringCase.AS_IS:
|
||||
warnings.warn('case sensitive usernames may lead to impersonation and unexpected behavior', UserWarning)
|
||||
|
||||
return match_column(length, regex, case=case, nullable=nullable, unique=True, *args, **kwargs)
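A sketch of these column helpers in a model. Everything here is made up for illustration, and it assumes the Incomplete/Wanted placeholders from suou.classtools are resolved at class-creation time, as the rest of this module expects.

```python
Base = declarative_base('example.org', b'not-a-real-secret',
                        snowflake_epoch=1_577_836_800)   # epoch is an arbitrary example

class Account(Base):
    __tablename__ = 'account'
    id = snowflake_column()                  # snowflake primary key
    username = username_column(32)           # unique, lower-case handle
    display_name = match_column(64, r'[^\r\n]+')
    is_active = bool_column(True)            # server-side default: true
```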
|
||||
|
||||
|
||||
def bool_column(value: bool = False, nullable: bool = False, **kwargs) -> Column[bool]:
|
||||
"""
|
||||
Column for a single boolean value.
|
||||
|
||||
*New in 0.4.0*
|
||||
"""
|
||||
def_val = text('true') if value else text('false')
|
||||
return Column(Boolean, server_default=def_val, nullable=nullable, **kwargs)
|
||||
|
||||
|
||||
@dei_args(primary_secret='master_secret')
|
||||
def declarative_base(domain_name: str, master_secret: bytes, metadata: dict | None = None, **kwargs) -> type[DeclarativeBase]:
|
||||
"""
|
||||
Drop-in replacement for sqlalchemy.orm.declarative_base()
|
||||
taking in account requirements for SIQ generation (i.e. domain name).
|
||||
"""
|
||||
if not isinstance(metadata, dict):
|
||||
metadata = dict()
|
||||
if 'info' not in metadata:
|
||||
metadata['info'] = dict()
|
||||
# snowflake metadata
|
||||
snowflake_kwargs = kwargs_prefix(kwargs, 'snowflake_', remove=True, keep_prefix=True)
|
||||
metadata['info'].update(
|
||||
domain_name = domain_name,
|
||||
secret_key = master_secret,
|
||||
**snowflake_kwargs
|
||||
)
|
||||
Base = _declarative_base(metadata=MetaData(**metadata), **kwargs)
|
||||
return Base
|
||||
entity_base = warnings.deprecated('use declarative_base() instead')(declarative_base)
|
||||
|
||||
|
||||
|
||||
def author_pair(fk_name: str, *, id_type: type | TypeEngine = IdType, sig_type: type | None = None, nullable: bool = False, sig_length: int | None = 2048, **ka) -> tuple[Column, Column]:
|
||||
"""
|
||||
Return an owner ID/signature column pair, for authenticated values.
|
||||
"""
|
||||
id_ka = kwargs_prefix(ka, 'id_')
|
||||
sig_ka = kwargs_prefix(ka, 'sig_')
|
||||
id_col = Column(id_type, ForeignKey(fk_name), nullable = nullable, **id_ka)
|
||||
sig_col = Column(sig_type or LargeBinary(sig_length), nullable = nullable, **sig_ka)
|
||||
return (id_col, sig_col)
|
||||
|
||||
|
||||
def age_pair(*, nullable: bool = False, **ka) -> tuple[Column, Column]:
|
||||
"""
|
||||
Return a SIS-compliant age representation, i.e. a date and accuracy pair.
|
||||
|
||||
Accuracy is represented by a small integer:
|
||||
0 = exact
|
||||
1 = month and day
|
||||
2 = year and month
|
||||
3 = year
|
||||
4 = estimated year
|
||||
"""
|
||||
date_ka = kwargs_prefix(ka, 'date_')
|
||||
acc_ka = kwargs_prefix(ka, 'acc_')
|
||||
date_col = Column(Date, nullable = nullable, **date_ka)
|
||||
acc_col = Column(SmallInteger, nullable = nullable, **acc_ka)
|
||||
return (date_col, acc_col)
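For illustration, the pairs are meant to be unpacked straight into two model attributes; the base class, table and foreign-key target below are hypothetical.

```python
class Person(Base):                          # Base: a declarative base (assumed)
    __tablename__ = 'person'
    id = snowflake_column()
    # creator id + detached signature, for authenticated rows
    author_id, author_sig = author_pair('account.id', id_type=BigInteger)
    # date of birth plus accuracy level (0 = exact ... 4 = estimated year)
    birth_date, birth_acc = age_pair(nullable=True)
```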
|
||||
|
||||
|
||||
def secret_column(length: int = 64, max_length: int | None = None, gen: Callable[[int], bytes] = os.urandom, nullable=False, **kwargs):
|
||||
"""
|
||||
Column filled in by default with random bits (64 by default). Useful for secrets.
|
||||
|
||||
*New in 0.6.0*
|
||||
"""
|
||||
max_length = max_length or length
|
||||
return Column(LargeBinary(max_length), default=lambda: gen(length), nullable=nullable, **kwargs)
|
||||
|
||||
|
||||
|
||||
def parent_children(keyword: str, /, *, lazy='selectin', **kwargs) -> tuple[Incomplete[Relationship[Any]], Incomplete[Relationship[Any]]]:
|
||||
"""
|
||||
Self-referential one-to-many relationship pair.
|
||||
Parent comes first, children come later.
|
||||
|
||||
keyword is used in back_populates column names: convention over
|
||||
configuration. Naming it otherwise will BREAK your models.
|
||||
|
||||
Additional keyword arguments can be sourced with parent_ and child_ argument prefixes,
|
||||
obviously.
|
||||
|
||||
*Changed in 0.5.0*: both relationship()s now use lazy='selectin' by default.
|
||||
"""
|
||||
|
||||
parent_kwargs = kwargs_prefix(kwargs, 'parent_')
|
||||
child_kwargs = kwargs_prefix(kwargs, 'child_')
|
||||
|
||||
parent: Incomplete[Relationship[Any]] = Incomplete(relationship, Wanted(lambda o, n: o.__name__), back_populates=f'child_{keyword}s', lazy=lazy, **parent_kwargs)
|
||||
child: Incomplete[Relationship[Any]] = Incomplete(relationship, Wanted(lambda o, n: o.__name__), back_populates=f'parent_{keyword}', lazy=lazy, **child_kwargs)
|
||||
|
||||
return parent, child
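A sketch of the naming convention the helper relies on: for the keyword 'comment', the attributes must be called exactly parent_comment and child_comments. The table and columns are hypothetical, and the usual self-referential relationship caveats (remote_side and so on) still apply.

```python
class Comment(Base):                         # Base: a declarative base (assumed)
    __tablename__ = 'comment'
    id = Column(BigInteger, primary_key=True)
    parent_id = Column(BigInteger, ForeignKey('comment.id'), nullable=True)
    # names follow the keyword: parent_<keyword> / child_<keyword>s
    parent_comment, child_comments = parent_children('comment')
```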
|
||||
|
||||
|
||||
def a_relationship(primary = None, /, j=None, *, lazy='selectin', **kwargs):
|
||||
"""
|
||||
Shorthand for relationship() that sets lazy='selectin' by default.
|
||||
|
||||
*New in 0.6.0*
|
||||
"""
|
||||
if j:
|
||||
kwargs['primaryjoin'] = j
|
||||
return relationship(primary, lazy=lazy, **kwargs) # pyright: ignore[reportArgumentType]
|
||||
|
||||
|
||||
def unbound_fk(target: str | Column | InstrumentedAttribute, typ: _T | None = None, **kwargs) -> Column[_T | IdType]:
|
||||
"""
|
||||
Shorthand for creating an "unbound" foreign key column from the referenced column (or its name).

"Unbound" foreign keys are nullable and are set to NULL when the referenced object is deleted.
|
||||
|
||||
If target is a string, make sure to pass the column type at typ= (default: IdType aka varbinary(16))!
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
if isinstance(target, (Column, InstrumentedAttribute)):
|
||||
target_name = f'{target.table.name}.{target.name}'
|
||||
typ = target.type
|
||||
elif isinstance(target, str):
|
||||
target_name = target
|
||||
if typ is None:
|
||||
typ = IdType
|
||||
else:
|
||||
raise TypeError('target must be a str, a Column or a InstrumentedAttribute')
|
||||
|
||||
return Column(typ, ForeignKey(target_name, ondelete='SET NULL'), nullable=True, **kwargs)
|
||||
|
||||
def bound_fk(target: str | Column | InstrumentedAttribute, typ: _T | None = None, **kwargs) -> Column[_T | IdType]:
|
||||
"""
|
||||
Shorthand for creating a "bound" foreign key column from the referenced column (or its name).

"Bound" foreign keys are not nullable and cascade on delete: when the parent is deleted, all of its children are deleted as well.
|
||||
|
||||
If target is a string, make sure to pass the column type at typ= (default: IdType aka varbinary(16))!
|
||||
|
||||
*New in 0.5.0*
|
||||
"""
|
||||
if isinstance(target, (Column, InstrumentedAttribute)):
|
||||
target_name = f'{target.table.name}.{target.name}'
|
||||
typ = target.type
|
||||
elif isinstance(target, str):
|
||||
target_name = target
|
||||
if typ is None:
|
||||
typ = IdType
|
||||
else:
|
||||
raise TypeError('target must be a str, a Column or a InstrumentedAttribute')
|
||||
|
||||
return Column(typ, ForeignKey(target_name, ondelete='CASCADE'), nullable=False, **kwargs)
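Illustrative only (table and column names are made up): a child row that is deleted with its parent, next to an optional reference that is merely nulled out.

```python
class Photo(Base):                           # Base: a declarative base (assumed)
    __tablename__ = 'photo'
    id = Column(BigInteger, primary_key=True)
    album_id = bound_fk('album.id', BigInteger)       # ON DELETE CASCADE, NOT NULL
    editor_id = unbound_fk('account.id', BigInteger)  # ON DELETE SET NULL, nullable
```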
|
||||
|
||||
|
||||
class _BitComparator(Comparator):
|
||||
"""
|
||||
Comparator object for BitSelector()
|
||||
|
||||
*New in 0.6.0*
|
||||
"""
|
||||
_column: Column
|
||||
_flag: int
|
||||
def __init__(self, col, flag):
|
||||
self._column = col
|
||||
self._flag = flag
|
||||
def _bulk_update_tuples(self, value):
|
||||
return [ (self._column, self._upd_exp(value)) ]
|
||||
def operate(self, op, other, **kwargs):
|
||||
return op(self._sel_exp(), self._flag if other else 0, **kwargs)
|
||||
def __clause_element__(self):
|
||||
return self._column
|
||||
def __str__(self):
|
||||
return str(self._column)
|
||||
def _sel_exp(self):
|
||||
return self._column.op('&')(self._flag)
|
||||
def _upd_exp(self, value):
|
||||
return self._column.op('|')(self._flag) if value else self._column.op('&')(~self._flag)
|
||||
|
||||
class BitSelector:
|
||||
"""
|
||||
"Virtual" column representing a single bit in an integer column (usually a BigInteger).
|
||||
|
||||
Mimics peewee's BitField() behavior, with SQLAlchemy.
|
||||
|
||||
*New in 0.6.0*
|
||||
"""
|
||||
_column: Column
|
||||
_flag: int
|
||||
_name: str
|
||||
def __init__(self, column, flag: int):
|
||||
if bin(flag := int(flag))[2:].rstrip('0') != '1':
|
||||
warnings.warn('using non-powers of 2 as flags may cause errors or undefined behavior', FutureWarning)
|
||||
self._column = column
|
||||
self._flag = flag
|
||||
def __set_name__(self, owner, name):
|
||||
self._name = name
|
||||
def __get__(self, obj, objtype=None):
|
||||
if obj:
|
||||
return getattr(obj, self._column.name) & self._flag > 0
|
||||
else:
|
||||
return _BitComparator(self._column, self._flag)
|
||||
def __set__(self, obj, val):
|
||||
if obj:
|
||||
orig = getattr(obj, self._column.name)
|
||||
if val:
|
||||
orig |= self._flag
|
||||
else:
|
||||
orig &= ~(self._flag)
|
||||
setattr(obj, self._column.name, orig)
|
||||
else:
|
||||
raise NotImplementedError
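A sketch of the intended use (names hypothetical): several named flags packed into one integer column.

```python
class Membership(Base):                      # Base: a declarative base (assumed)
    __tablename__ = 'membership'
    id = Column(BigInteger, primary_key=True)
    flags = Column(BigInteger, server_default=text('0'), nullable=False)
    is_admin = BitSelector(flags, 1 << 0)
    is_muted = BitSelector(flags, 1 << 1)

# On an instance, reading or writing the attribute toggles the bit in `flags`:
#     m.is_admin = True
# On the class, it yields a comparator usable in queries:
#     select(Membership).where(Membership.is_muted == True)
```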
|
||||
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
"""
|
||||
Helpers for asynchronous use of SQLAlchemy.
|
||||
|
||||
*New in 0.5.0; moved to ``sqlalchemy.asyncio`` in 0.6.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .functools import deprecated
|
||||
|
||||
|
||||
|
||||
from .sqlalchemy.asyncio import SQLAlchemy, AsyncSelectPagination, async_query
|
||||
|
||||
SQLAlchemy = deprecated('import from suou.sqlalchemy.asyncio instead')(SQLAlchemy)
|
||||
AsyncSelectPagination = deprecated('import from suou.sqlalchemy.asyncio instead')(AsyncSelectPagination)
|
||||
async_query = deprecated('import from suou.sqlalchemy.asyncio instead')(async_query)
|
||||
|
||||
# Optional dependency: do not import into __init__.py
|
||||
__all__ = ('SQLAlchemy', 'AsyncSelectPagination', 'async_query')
|
||||
|
|
@ -1,63 +0,0 @@
|
|||
"""
|
||||
Utilities for string manipulation.
|
||||
|
||||
Why `strtools`? Why not `string`? I just~ happen to not like it
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
from typing import Callable, Iterable
|
||||
from pydantic import validate_call
|
||||
|
||||
from .itertools import makelist
|
||||
|
||||
class PrefixIdentifier:
|
||||
_prefix: str
|
||||
|
||||
def __init__(self, prefix: str | None, validators: Iterable[Callable[[str], bool]] | Callable[[str], bool] | None = None):
|
||||
prefix = '' if prefix is None else prefix
|
||||
if not isinstance(prefix, str):
|
||||
raise TypeError
|
||||
validators = makelist(validators, wrap=False)
|
||||
for validator in validators:
|
||||
if not validator(prefix):
|
||||
raise ValueError('invalid prefix')
|
||||
self._prefix = prefix
|
||||
|
||||
@validate_call()
|
||||
def __getattr__(self, key: str):
|
||||
return f'{self._prefix}{key}'
|
||||
|
||||
@validate_call()
|
||||
def __getitem__(self, key: str) -> str:
|
||||
return f'{self._prefix}{key}'
|
||||
|
||||
def __str__(self):
|
||||
return f'{self._prefix}'
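For illustration (the prefix is made up): attribute or item access simply returns the prefixed name.

```python
cache_key = PrefixIdentifier('myapp:cache:')
cache_key.user          # -> 'myapp:cache:user'
cache_key['session']    # -> 'myapp:cache:session'
str(cache_key)          # -> 'myapp:cache:'
```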
|
||||
|
||||
|
||||
class SpitText:
|
||||
"""
|
||||
A formatter that fills str.format() placeholders with attributes looked up on the instance.
|
||||
|
||||
*New in 0.11.0*
|
||||
"""
|
||||
|
||||
def format(self, templ: str, *attrs: str) -> str:
|
||||
attrs = [getattr(self, attr, f'{{{{ {attr} }}}}') for attr in attrs]
|
||||
return templ.format(*attrs).strip()
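A hypothetical subclass showing the lookup rule: attributes found on the instance are substituted, missing ones are left as '{{ name }}' placeholders.

```python
class Greeting(SpitText):
    name = 'world'

Greeting().format('Hello {}! From {}.', 'name', 'sender')
# -> 'Hello world! From {{ sender }}.'
```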
|
||||
|
||||
|
||||
__all__ = ('PrefixIdentifier', 'SpitText')
|
||||
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
"""
|
||||
Utilities for console I/O and text user interfaces (TUI)
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
from __future__ import annotations
|
||||
from functools import wraps
|
||||
import sys
|
||||
from suou.exceptions import TerminalRequiredError
|
||||
|
||||
|
||||
def terminal_required(func):
|
||||
"""
|
||||
Require stdin, stdout and stderr to all be connected to a terminal when the decorated callable runs.
|
||||
|
||||
*New in 0.7.0*
|
||||
"""
|
||||
@wraps(func)
|
||||
def wrapper(*a, **ka):
|
||||
if not (sys.stdin.isatty() and sys.stdout.isatty() and sys.stderr.isatty()):
|
||||
raise TerminalRequiredError('this program must be run from a terminal')
|
||||
return func(*a, **ka)
|
||||
return wrapper
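Illustrative only: guarding an interactive entry point.

```python
import getpass

@terminal_required
def prompt_password() -> str:
    return getpass.getpass('Password: ')
```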
|
||||
|
||||
__all__ = ('terminal_required',)
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
"""
|
||||
Miscellaneous validator closures.
|
||||
Utilities for marshmallow, a schema-agnostic serializer/deserializer.
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -16,15 +16,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|||
|
||||
import re
|
||||
|
||||
from typing import Any, Iterable, TypeVar
|
||||
|
||||
from suou.classtools import MISSING
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
def matches(regex: str | int, /, length: int = 0, *, flags=0):
|
||||
"""
|
||||
Return a function which returns True if X is shorter than length and matches the given regex.
|
||||
Return a function which returns true if X is shorter than length and matches the given regex.
|
||||
"""
|
||||
if isinstance(regex, int):
|
||||
length = regex
|
||||
|
|
@ -34,47 +28,4 @@ def matches(regex: str | int, /, length: int = 0, *, flags=0):
|
|||
return validator
|
||||
|
||||
|
||||
def must_be(obj: _T | Any, typ: type[_T] | Iterable[type], message: str, *, exc = TypeError) -> _T:
|
||||
"""
|
||||
Raise TypeError if the requested object is not of the desired type(s), with a nice message.
|
||||
|
||||
(Not properly a validator.)
|
||||
"""
|
||||
if not isinstance(obj, typ):
|
||||
raise TypeError(f'{message}, not {obj.__class__.__name__!r}')
|
||||
return obj
|
||||
|
||||
|
||||
def not_greater_than(y):
|
||||
"""
|
||||
Return a function that returns True if X is not greater than (i.e. lesser than or equal to) the given value.
|
||||
"""
|
||||
return lambda x: x <= y
|
||||
|
||||
def not_less_than(y):
|
||||
"""
|
||||
Return a function that returns True if X is not less than (i.e. greater than or equal to) the given value.
|
||||
"""
|
||||
return lambda x: x >= y
|
||||
|
||||
def yesno(x: str | int | bool | None) -> bool:
|
||||
"""
|
||||
Return False if x is None, False, 0, or a string whose lowercase form is one of '', '0', 'off', 'n', 'no', 'false', 'f'; True otherwise.
|
||||
|
||||
*New in 0.9.0*
|
||||
|
||||
*Changed in 0.11.1*: now accepts None and bool.
|
||||
"""
|
||||
if x in (None, MISSING):
|
||||
return False
|
||||
if isinstance(x, bool):
|
||||
return x
|
||||
if isinstance(x, int):
|
||||
return x != 0
|
||||
if isinstance(x, str):
|
||||
return x.lower() not in ('', '0', 'off', 'n', 'no', 'false', 'f')
|
||||
return True
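Typical use is parsing flag-like settings; the environment variable name below is hypothetical.

```python
import os

DEBUG = yesno(os.environ.get('APP_DEBUG'))   # unset -> None -> False
yesno('Off')     # False
yesno(1)         # True
```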
|
||||
|
||||
|
||||
__all__ = ('matches', 'must_be', 'not_greater_than', 'not_less_than', 'yesno')
|
||||
|
||||
__all__ = ('matches', )
|
||||
|
|
@ -1,92 +0,0 @@
|
|||
"""
|
||||
Content serving API over HTTP, based on Starlette.
|
||||
|
||||
*New in 0.6.0*
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2025 Sakuragasaki46.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
See LICENSE for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This software is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
from typing import Callable
|
||||
import warnings
|
||||
from starlette.applications import Starlette
|
||||
from starlette.responses import JSONResponse, PlainTextResponse, Response
|
||||
from starlette.routing import Route
|
||||
|
||||
from suou.itertools import makelist
|
||||
from suou.functools import future
|
||||
|
||||
@future()
|
||||
class Waiter():
|
||||
_cached_app: Callable | None = None
|
||||
|
||||
def __init__(self):
|
||||
self.routes: list[Route] = []
|
||||
self.production = False
|
||||
|
||||
async def __call__(self, *args):
|
||||
return await self._build_app()(*args)
|
||||
|
||||
def _build_app(self) -> Starlette:
|
||||
if not self._cached_app:
|
||||
self._cached_app = Starlette(
|
||||
debug = not self.production,
|
||||
routes= self.routes
|
||||
)
|
||||
return self._cached_app
|
||||
|
||||
def get(self, endpoint: str, *a, **k):
|
||||
return self._route('GET', endpoint, *a, **k)
|
||||
|
||||
def post(self, endpoint: str, *a, **k):
|
||||
return self._route('POST', endpoint, *a, **k)
|
||||
|
||||
def delete(self, endpoint: str, *a, **k):
|
||||
return self._route('DELETE', endpoint, *a, **k)
|
||||
|
||||
def put(self, endpoint: str, *a, **k):
|
||||
return self._route('PUT', endpoint, *a, **k)
|
||||
|
||||
def patch(self, endpoint: str, *a, **k):
|
||||
return self._route('PATCH', endpoint, *a, **k)
|
||||
|
||||
def _route(self, methods: str | list[str], endpoint: str, **kwargs):
|
||||
def decorator(func):
|
||||
self.routes.append(Route(endpoint, func, methods=makelist(methods, False), **kwargs))
|
||||
return func
|
||||
return decorator
|
||||
|
||||
## TODO get, post, etc.
|
||||
|
||||
def ok(content = None, **ka):
|
||||
if content is None:
|
||||
return Response(status_code=204, **ka)
|
||||
elif isinstance(content, dict):
|
||||
return JSONResponse(content, **ka)
|
||||
elif isinstance(content, str):
|
||||
return PlainTextResponse(content, **ka)
|
||||
return content
|
||||
|
||||
def ko(status: int, /, content = None, **ka):
|
||||
if status < 400 or status > 599:
|
||||
warnings.warn(f'HTTP {status} is not an error status', UserWarning)
|
||||
if content is None:
|
||||
return Response(status_code=status, **ka)
|
||||
elif isinstance(content, dict):
|
||||
return JSONResponse(content, status_code=status, **ka)
|
||||
elif isinstance(content, str):
|
||||
return PlainTextResponse(content, status_code=status, **ka)
|
||||
return content
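A hedged sketch of wiring routes onto the (experimental) Waiter above and serving it with any ASGI server; the paths and module name are made up.

```python
app = Waiter()

@app.get('/ping')
async def ping(request):
    return ok('pong')

@app.post('/items')
async def create_item(request):
    return ko(400, {'error': 'not implemented yet'})

# e.g.:  uvicorn example_module:app
```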
|
||||
|
||||
# This module is experimental and therefore not re-exported into __init__
|
||||
__all__ = ('ko', 'ok', 'Waiter')
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
|
||||
|
||||
from datetime import timezone
|
||||
import datetime
|
||||
from suou.calendar import want_datetime, want_isodate
|
||||
|
||||
import unittest
|
||||
|
||||
|
||||
class TestCalendar(unittest.TestCase):
|
||||
def setUp(self) -> None:
|
||||
...
|
||||
def tearDown(self) -> None:
|
||||
...
|
||||
|
||||
def test_want_isodate(self):
|
||||
## if test fails, make sure time zone is set to UTC.
|
||||
self.assertEqual(want_isodate(0, tz=timezone.utc), '1970-01-01T00:00:00+00:00')
|
||||
self.assertEqual(want_isodate(86400, tz=timezone.utc), '1970-01-02T00:00:00+00:00')
|
||||
self.assertEqual(want_isodate(1577840584.0, tz=timezone.utc), '2020-01-01T01:03:04+00:00')
|
||||
# TODO
|
||||
|
||||
def test_want_datetime(self):
|
||||
self.assertEqual(want_datetime('2017-04-10T19:00:01', tz=timezone.utc) - want_datetime('2017-04-10T18:00:00', tz=timezone.utc), datetime.timedelta(seconds=3601))
|
||||
# TODO
|
||||
|
||||
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
|
||||
|
||||
import binascii
|
||||
import unittest
|
||||
from suou.codecs import b64encode, b64decode, want_urlsafe, z85decode
|
||||
|
||||
B1 = b'N\xf0\xb4\xc3\x85\n\xf9\xb6\x9a\x0f\x82\xa6\x99G\x07#'
|
||||
B2 = b'\xbcXiF,@|{\xbe\xe3\x0cz\xa8\xcbQ\x82'
|
||||
B3 = b"\xe9\x18)\xcb'\xc2\x96\xae\xde\x86"
|
||||
B4 = B1[-2:] + B2[:-2]
|
||||
B5 = b'\xff\xf8\xa7\x8a\xdf\xff'
|
||||
|
||||
|
||||
class TestCodecs(unittest.TestCase):
|
||||
def setUp(self) -> None:
|
||||
...
|
||||
def tearDown(self) -> None:
|
||||
...
|
||||
|
||||
def test_b64encode(self):
|
||||
self.assertEqual(b64encode(B1), 'TvC0w4UK-baaD4KmmUcHIw')
|
||||
self.assertEqual(b64encode(B2), 'vFhpRixAfHu-4wx6qMtRgg')
|
||||
self.assertEqual(b64encode(B3), '6RgpyyfClq7ehg')
|
||||
self.assertEqual(b64encode(B4), 'ByO8WGlGLEB8e77jDHqoyw')
|
||||
self.assertEqual(b64encode(B5), '__init__')
|
||||
self.assertEqual(b64encode(B1[:4]), 'TvC0ww')
|
||||
self.assertEqual(b64encode(b'\0' + B1[:4]), 'AE7wtMM')
|
||||
self.assertEqual(b64encode(b'\0\0\0\0\0' + B1[:4]), 'AAAAAABO8LTD')
|
||||
self.assertEqual(b64encode(b'\xff'), '_w')
|
||||
self.assertEqual(b64encode(b''), '')
|
||||
|
||||
def test_b64decode(self):
|
||||
self.assertEqual(b64decode('TvC0w4UK-baaD4KmmUcHIw'), B1)
|
||||
self.assertEqual(b64decode('vFhpRixAfHu-4wx6qMtRgg'), B2)
|
||||
self.assertEqual(b64decode('6RgpyyfClq7ehg'), B3)
|
||||
self.assertEqual(b64decode('ByO8WGlGLEB8e77jDHqoyw'), B4)
|
||||
self.assertEqual(b64decode('__init__'), B5)
|
||||
self.assertEqual(b64decode('//init//'), B5)
|
||||
self.assertEqual(b64decode('TvC0ww'), B1[:4])
|
||||
self.assertEqual(b64decode('AE7wtMM'), b'\0' + B1[:4])
|
||||
self.assertEqual(b64decode('AAAAAABO8LTD'), b'\0\0\0\0\0' + B1[:4])
|
||||
self.assertEqual(b64decode('_w'), b'\xff')
|
||||
self.assertEqual(b64decode(''), b'')
|
||||
|
||||
self.assertRaises(binascii.Error, b64decode, 'C')
|
||||
|
||||
def test_want_urlsafe(self):
|
||||
self.assertEqual('__init__', want_urlsafe('//init_/'))
|
||||
self.assertEqual('Disney-', want_urlsafe('Disney+'))
|
||||
self.assertEqual('spaziocosenza', want_urlsafe('spazio cosenza'))
|
||||
self.assertEqual('=======', want_urlsafe('======='))
|
||||
|
||||
def test_z85decode(self):
|
||||
self.assertEqual(z85decode('pvLTdG:NT:NH+1ENmvGb'), B1)
|
||||
self.assertEqual(z85decode('YJw(qei[PfZt/SFSln4&'), B2)
|
||||
self.assertEqual(z85decode('>[>>)c=hgL?I8'), B3)
|
||||
self.assertEqual(z85decode('2p3(-x*%TsE0-P/40[>}'), B4)
|
||||
self.assertEqual(z85decode('%m&HH?#r'), B5)
|
||||
self.assertEqual(z85decode('%m&HH?#uEvW8mO8}l(.5F#j@a2o%'), B5 + B1)
|
||||
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
|
||||
|
||||
|
||||
import unittest
|
||||
from suou import chalk
|
||||
|
||||
class TestColor(unittest.TestCase):
|
||||
def setUp(self) -> None:
|
||||
...
|
||||
def tearDown(self) -> None:
|
||||
...
|
||||
|
||||
def test_chalk_colors(self):
|
||||
strg = "The quick brown fox jumps over the lazy dog"
|
||||
|
||||
self.assertEqual(f'\x1b[31m{strg}\x1b[39m', chalk.red(strg))
|
||||
self.assertEqual(f'\x1b[32m{strg}\x1b[39m', chalk.green(strg))
|
||||
self.assertEqual(f'\x1b[34m{strg}\x1b[39m', chalk.blue(strg))
|
||||
self.assertEqual(f'\x1b[36m{strg}\x1b[39m', chalk.cyan(strg))
|
||||
self.assertEqual(f'\x1b[33m{strg}\x1b[39m', chalk.yellow(strg))
|
||||
self.assertEqual(f'\x1b[35m{strg}\x1b[39m', chalk.purple(strg))
|
||||
|
||||
def test_chalk_bold(self):
|
||||
strg = "The quick brown fox jumps over the lazy dog"
|
||||
self.assertEqual(f'\x1b[1m{strg}\x1b[22m', chalk.bold(strg))
|
||||
self.assertEqual(f'\x1b[2m{strg}\x1b[22m', chalk.faint(strg))
|
||||
self.assertEqual(f'\x1b[1m\x1b[33m{strg}\x1b[39m\x1b[22m', chalk.bold.yellow(strg))
|
||||
Some files were not shown because too many files have changed in this diff.