diff --git a/LICENSE.md b/LICENSE.txt similarity index 100% rename from LICENSE.md rename to LICENSE.txt diff --git a/docs/documentation/sardinopedia/basic_swimming_lessons.md b/docs/documentation/sardinopedia/basic_swimming_lessons.md index f1aafeaf..73b1f85b 100644 --- a/docs/documentation/sardinopedia/basic_swimming_lessons.md +++ b/docs/documentation/sardinopedia/basic_swimming_lessons.md @@ -143,6 +143,7 @@ In addition to that, take note of the `play()` method used for assigning a **Sen * `play(*args, **kwargs)`: the default **SuperDirt** (or **S**) Sender. * `play_midi(*args, **kwargs)`: the default **MIDI** (or **M**) Sender. * `play_osc(*args, **kwargs)`: the default **OSC** (or **O**) Sender. +* `run(func: Callable)`: run any function like if it was a surfboard! I repeat, these functions are basically senders with a different name! You will have to learn how to use **Senders** to be truly efficient with the surfing mode. You can spend your life using **Sardine** this way or combine it with *swimming functions*, this is entirely up to you! This mode was initially designed in order to demonstrate the syntax of [FoxDot](https://foxdot.org). I find it to be a fun and efficient way to jam along with friends as well :) You can just fire up a **Sardine** session and write pretty fast. 
diff --git a/fishery/UserConfig.py b/fishery/UserConfig.py index ce638cef..82a60800 100644 --- a/fishery/UserConfig.py +++ b/fishery/UserConfig.py @@ -1,7 +1,8 @@ -from dataclasses import dataclass import json +from dataclasses import dataclass from pathlib import Path from typing import Union + from appdirs import * from rich import print @@ -21,11 +22,12 @@ "midi": None, "bpm": 125, "beats": 4, - "ppqn": 48, "parameters": [], - "boot_superdirt": True, + "superdirt_handler": True, + "boot_supercollider": False, + "sardine_boot_file": True, "verbose_superdirt": False, - "active_clock": True, + "link_clock": False, "superdirt_config_path": str(USER_DIR / "default_superdirt.scd"), "user_config_path": str(USER_DIR / "user_configuration.py"), "deferred_scheduling": True, @@ -48,13 +50,14 @@ class Config: midi: Union[str, None] beats: int parameters: list - ppqn: int bpm: int superdirt_config_path: str verbose_superdirt: bool user_config_path: str - boot_superdirt: bool - active_clock: bool + boot_supercollider: bool + superdirt_handler: bool + sardine_boot_file: bool + link_clock: bool deferred_scheduling: bool @classmethod @@ -64,11 +67,12 @@ def from_dict(cls, data: dict) -> "Config": midi=config["midi"], beats=config["beats"], parameters=config["parameters"], - ppqn=config["ppqn"], bpm=config["bpm"], - boot_superdirt=config["boot_superdirt"], + superdirt_handler=config["superdirt_handler"], + boot_supercollider=config["boot_supercollider"], + sardine_boot_file=config["sardine_boot_file"], verbose_superdirt=config["verbose_superdirt"], - active_clock=config["active_clock"], + link_clock=config["link_clock"], superdirt_config_path=config["superdirt_config_path"], user_config_path=config["user_config_path"], deferred_scheduling=config["deferred_scheduling"], @@ -80,12 +84,13 @@ def to_dict(self) -> dict: "midi": self.midi, "beats": self.beats, "parameters": self.parameters, - "ppqn": self.ppqn, "bpm": self.bpm, - "boot_superdirt": self.boot_superdirt, + 
"superdirt_handler": self.superdirt_handler, + "boot_supercollider": self.boot_supercollider, + "sardine_boot_file": self.sardine_boot_file, "verbose_superdirt": self.verbose_superdirt, "superdirt_config_path": self.superdirt_config_path, - "active_clock": self.active_clock, + "link_clock": self.link_clock, "user_config_path": self.user_config_path, "deferred_scheduling": self.deferred_scheduling, } diff --git a/fishery/__main__.py b/fishery/__main__.py index f9a13b23..cc2b92fb 100644 --- a/fishery/__main__.py +++ b/fishery/__main__.py @@ -1,158 +1,61 @@ -# https://github.com/python/cpython/blob/main/Lib/asyncio/__main__.py -# Taken from the CPython Github Repository. Custom version of the -# asyncio REPL that will autoload Sardine whenever started. - -import concurrent.futures -import threading -import platform -import warnings -import inspect -import asyncio -import psutil -import types -import code -import ast -import os - -from appdirs import user_data_dir -from asyncio import futures -from pathlib import Path -from rich import print as pretty_print -from rich.panel import Panel - -system = platform.system() - -# Setting very high priority for this process (time-critical) -warning_text = "[yellow]/!\\\\[/yellow] [red bold] Run Sardine faster by starting it using\ -\nadministrator priviledges (sudo on Unix..)[/red bold] [yellow]/!\\\\[/yellow]" -if system == "Windows": - try: - p = psutil.Process(os.getpid()) - p.nice(psutil.HIGH_PRIORITY_CLASS) - except psutil.AccessDenied: - pretty_print(Panel.fit(warning_text)) - pass -else: - try: - p = psutil.Process(os.getpid()) - p.nice(-20) - except psutil.AccessDenied: - pretty_print(Panel.fit(warning_text)) - pass - - -# Appdirs boilerplate -APP_NAME, APP_AUTHOR = "Sardine", "Bubobubobubo" -USER_DIR = Path(user_data_dir(APP_NAME, APP_AUTHOR)) - - -class AsyncIOInteractiveConsole(code.InteractiveConsole): - def __init__(self, locals, loop): - super().__init__(locals) - self.compile.compiler.flags |= 
ast.PyCF_ALLOW_TOP_LEVEL_AWAIT - - self.loop = loop - - def runcode(self, code): - future = concurrent.futures.Future() - - def callback(): - global repl_future - global repl_future_interrupted - - repl_future = None - repl_future_interrupted = False - - func = types.FunctionType(code, self.locals) - try: - coro = func() - except SystemExit: - raise - except KeyboardInterrupt as ex: - repl_future_interrupted = True - future.set_exception(ex) - return - except BaseException as ex: - future.set_exception(ex) - return - - if not inspect.iscoroutine(coro): - future.set_result(coro) - return - - try: - repl_future = self.loop.create_task(coro) - futures._chain_future(repl_future, future) - except BaseException as exc: - future.set_exception(exc) - - loop.call_soon_threadsafe(callback) - - try: - return future.result() - except SystemExit: - raise - except BaseException: - if repl_future_interrupted: - self.write("\nKeyboardInterrupt\n") - else: - self.showtraceback() - - -class REPLThread(threading.Thread): - def run(self): - try: - banner = () - console.push("""import os""") - console.push("""os.environ['SARDINE_INIT_SESSION'] = 'YES'""") - console.push("""from sardine import *""") - console.interact(banner=banner, exitmsg="exiting asyncio REPL...") - finally: - warnings.filterwarnings( - "ignore", - message=r"^coroutine .* was never awaited$", - category=RuntimeWarning, - ) - - loop.call_soon_threadsafe(loop.stop) +import click + +from . 
import console +from .profiler import Profiler + +CONTEXT_SETTINGS = { + "help_option_names": ["-h", "--help"], +} + + +@click.group( + context_settings=CONTEXT_SETTINGS, + help="Starts sardine in an asyncio REPL.", + invoke_without_command=True, +) +@click.version_option( + package_name="sardine", + prog_name=__package__, + message="%(prog)s for %(package)s v%(version)s", +) +@click.pass_context +def main(ctx: click.Context): + if ctx.invoked_subcommand is None: + console.start() + + +@main.command( + short_help="Run sardine with a background profiler (requires the yappi package)", + help=""" + This command starts the deterministic profiler, yappi, and measures statistics + for both sardine and any functions written in the console. Once the REPL + is closed, a pstats file will be written containing the session's stats. + You can inspect the file's contents with Python's built-in pstats module + or a third-party package like snakeviz. + """, +) +@click.option( + "-c", + "--clock", + default="wall", + help="The clock type to use. 
Wall time includes time spent waiting, " + "while CPU time ignores it.", + show_default=True, + type=click.Choice(("cpu", "wall"), case_sensitive=False), +) +@click.option( + "-o", + "filepath", + default="stats.prof", + help="The path to use when outputting the pstats file", + show_default=True, + type=click.Path(dir_okay=False, writable=True), +) +def profile(clock: str, filepath: str): + profiler = Profiler(clock=clock, filepath=filepath) + with profiler: + console.start() if __name__ == "__main__": - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - repl_locals = {"asyncio": asyncio} - for key in { - "__name__", - "__package__", - "__loader__", - "__spec__", - "__builtins__", - "__file__", - }: - repl_locals[key] = locals()[key] - - console = AsyncIOInteractiveConsole(repl_locals, loop) - - repl_future = None - repl_future_interrupted = False - - try: - import readline # NoQA - except ImportError: - pass - - repl_thread = REPLThread() - repl_thread.daemon = True - repl_thread.start() - - while True: - try: - loop.run_forever() - except KeyboardInterrupt: - if repl_future and not repl_future.done(): - repl_future.cancel() - repl_future_interrupted = True - continue - else: - break + main() diff --git a/fishery/console.py b/fishery/console.py new file mode 100644 index 00000000..2b269b2e --- /dev/null +++ b/fishery/console.py @@ -0,0 +1,167 @@ +# https://github.com/python/cpython/blob/main/Lib/asyncio/__main__.py +# Taken from the CPython Github Repository. Custom version of the +# asyncio REPL that will autoload Sardine whenever started. 
+ +import ast +import asyncio +import code +import concurrent.futures +import inspect + +# import os +# import platform +import threading +import types +import warnings +from asyncio import futures +from pathlib import Path +from typing import Optional + +# import psutil +from appdirs import user_data_dir +from rich import print as pretty_print +from rich.panel import Panel + +import sardine + +from .runners import Runner + +# system = platform.system() +# # Setting very high priority for this process (time-critical) +# warning_text = "[yellow]/!\\\\[/yellow] [red bold] Run Sardine faster by starting it using\ +# \nadministrator priviledges (sudo on Unix..)[/red bold] [yellow]/!\\\\[/yellow]" +# if system == "Windows": +# try: +# p = psutil.Process(os.getpid()) +# p.nice(psutil.HIGH_PRIORITY_CLASS) +# except psutil.AccessDenied: +# pretty_print(Panel.fit(warning_text)) +# pass +# else: +# try: +# p = psutil.Process(os.getpid()) +# p.nice(-20) +# except psutil.AccessDenied: +# pretty_print(Panel.fit(warning_text)) +# pass + + +# Appdirs boilerplate +APP_NAME, APP_AUTHOR = "Sardine", "Bubobubobubo" +USER_DIR = Path(user_data_dir(APP_NAME, APP_AUTHOR)) + + +class AsyncIOInteractiveConsole(code.InteractiveConsole): + def __init__(self, locals: dict, loop: asyncio.BaseEventLoop): + super().__init__(locals) + self.compile.compiler.flags |= ast.PyCF_ALLOW_TOP_LEVEL_AWAIT + + self.loop = loop + self.repl_future: Optional[asyncio.Task] = None + self.repl_future_interrupted = False + + def _callback(self, future: concurrent.futures.Future, code: types.CodeType): + self.repl_future = None + self.repl_future_interrupted = False + + func = types.FunctionType(code, self.locals) + try: + coro = func() + except SystemExit: + raise + except KeyboardInterrupt as ex: + self.repl_future_interrupted = True + future.set_exception(ex) + return + except BaseException as ex: + future.set_exception(ex) + return + + if not inspect.iscoroutine(coro): + future.set_result(coro) + return + + 
try: + self.repl_future = self.loop.create_task(coro) + futures._chain_future(self.repl_future, future) + except BaseException as exc: + future.set_exception(exc) + + def runcode(self, code: types.CodeType): + future = concurrent.futures.Future() + + self.loop.call_soon_threadsafe(self._callback, future, code) + + try: + return future.result() + except SystemExit: + raise + except BaseException: + if self.repl_future_interrupted: + self.write("\nKeyboardInterrupt\n") + else: + self.showtraceback() + + +class REPLThread(threading.Thread): + def __init__(self, *args, console: AsyncIOInteractiveConsole, **kwargs): + super().__init__(*args, **kwargs) + self.console = console + + def run(self): + try: + banner = () + self.console.push("""import os""") + self.console.push("""os.environ['SARDINE_INIT_SESSION'] = 'YES'""") + self.console.push("""from sardine.run import *""") + self.console.interact(banner=banner, exitmsg="exiting asyncio REPL...") + finally: + warnings.filterwarnings( + "ignore", + message=r"^coroutine .* was never awaited$", + category=RuntimeWarning, + ) + + +async def run_forever(): + loop = asyncio.get_running_loop() + await loop.create_future() + + +def start(): + loop = sardine.event_loop.new_event_loop() + + repl_locals = {"asyncio": asyncio} + for key in ( + "__name__", + "__package__", + "__loader__", + "__spec__", + "__builtins__", + "__file__", + ): + repl_locals[key] = globals()[key] + + console = AsyncIOInteractiveConsole(repl_locals, loop) + + try: + import readline # NoQA + except ImportError: + pass + + repl_thread = REPLThread(console=console) + repl_thread.daemon = True + repl_thread.start() + + with Runner(loop=loop) as runner: + while True: + try: + runner.run(run_forever()) + except KeyboardInterrupt: + if console.repl_future and not console.repl_future.done(): + console.repl_future.cancel() + console.repl_future_interrupted = True + else: + break + else: + break diff --git a/fishery/profiler.py b/fishery/profiler.py new file mode 
100644 index 00000000..e038a76d --- /dev/null +++ b/fishery/profiler.py @@ -0,0 +1,35 @@ +from typing import Literal, Optional + +import click + +try: + import yappi +except ImportError: + yappi = None + +__all__ = ("Profiler", "yappi") + + +class Profiler: + def __init__( + self, + clock: Literal["CPU", "WALL"], + filepath: str, + ): + self.clock = clock + self.filepath = filepath + + def __enter__(self): + if yappi is None: + raise RuntimeError("yappi must be installed to enable profiling") + + yappi.set_clock_type(self.clock) + yappi.start(builtins=False) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + yappi.stop() + ystats = yappi.get_func_stats() + pstats = yappi.convert2pstats(ystats) + pstats.dump_stats(self.filepath) + click.echo(f"Profiler stats written to {self.filepath}") diff --git a/fishery/runners.py b/fishery/runners.py new file mode 100644 index 00000000..66250ab3 --- /dev/null +++ b/fishery/runners.py @@ -0,0 +1,193 @@ +# Modified copy of asyncio/runners.py from Python 3.11.0 +# https://github.com/python/cpython/blob/3.11/Lib/asyncio/runners.py + +__all__ = ("Runner",) + +import asyncio +import contextvars +import enum +import functools +import signal +import sys +import threading +from typing import Optional + + +class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + +class Runner: + """A context manager that controls event loop life cycle. + + The context manager always creates a new event loop, + allows to run async functions inside it, + and properly finalizes the loop at the context manager exit. + + If debug is True, the event loop will be run in debug mode. + If loop_factory is passed, it is used for new event loop creation. + + asyncio.run(main(), debug=True) + + is a shortcut for + + with asyncio.Runner(debug=True) as runner: + runner.run(main()) + + The run() method can be called multiple times within the runner's context. + + This can be useful for interactive console (e.g. 
IPython), + unittest runners, console tools, -- everywhere when async code + is called from existing sync framework and where the preferred single + asyncio.run() call doesn't work. + + """ + + # Note: the class is final, it is not intended for inheritance. + + def __init__(self, *, loop: asyncio.BaseEventLoop, debug: Optional[bool] = None): + # fishery: instead of a loop_factory argument, we're passing the loop directly + self._state = _State.CREATED + self._debug = debug + self._loop = loop + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self): + self._lazy_init() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + """Shutdown and close event loop.""" + if self._state is not _State.INITIALIZED: + return + try: + loop = self._loop + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.run_until_complete(loop.shutdown_default_executor()) + finally: + if self._set_event_loop: + asyncio.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self): + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro, *, context=None): + """Run a coroutine inside the embedded event loop.""" + if not asyncio.iscoroutine(coro): + raise ValueError("a coroutine was expected, got {!r}".format(coro)) + + if asyncio._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + + if sys.version_info >= (3, 11): + task = self._loop.create_task(coro, context=context) + else: + task = self._loop.create_task(coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = functools.partial(self._on_sigint, 
main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except asyncio.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + # fishery: we do want KeyboardInterrupt propagated on lower versions + # if uncancel is not None and uncancel() == 0: + if uncancel is None or uncancel() == 0: + raise KeyboardInterrupt() + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self): + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + + # if self._loop_factory is None: + # self._loop = asyncio.new_event_loop() + # if not self._set_event_loop: + # # Call set_event_loop only once to avoid calling + # # attach_loop multiple times on child watchers + # asyncio.set_event_loop(self._loop) + # self._set_event_loop = True + # else: + # self._loop = self._loop_factory() + asyncio.set_event_loop(self._loop) + + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task): + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + +def _cancel_all_tasks(loop: asyncio.BaseEventLoop): + to_cancel = asyncio.all_tasks(loop) + if not to_cancel: + return + + for task 
in to_cancel: + task.cancel() + + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) diff --git a/paper/Makefile b/paper/Makefile new file mode 100644 index 00000000..680c694c --- /dev/null +++ b/paper/Makefile @@ -0,0 +1,16 @@ +all: iclc2023.pdf iclc2023.html + +clean: + rm iclc2023.pdf iclc2023.html + +iclc2023.html: iclc2023.md references.bib + pandoc --template=pandoc/iclc.html --citeproc --number-sections iclc2023.md -o iclc2023.html --pdf-engine=xelatex + +iclc2023.pdf: iclc2023.md references.bib pandoc/iclc.latex pandoc/iclc.sty + pandoc --template=pandoc/iclc.latex --citeproc --number-sections iclc2023.md -o iclc2023.pdf --pdf-engine=xelatex + +iclc2023.docx: iclc2023.md references.bib + pandoc --citeproc --number-sections iclc2023.md -o iclc2023.docx + +iclc2023x.pdf: iclc2023.md references.bib pandoc/iclc.latex pandoc/iclc.sty + pandoc --template=pandoc/iclc.latex --citeproc --number-sections iclc2023.md --latex-engine=xelatex -o iclc2023x.pdf diff --git a/paper/css/iclc.css b/paper/css/iclc.css new file mode 100755 index 00000000..2d41b3c1 --- /dev/null +++ b/paper/css/iclc.css @@ -0,0 +1,11 @@ +body { + font-family: Linux Libertine O; + padding: 3em; + max-width: 60em; +} + +img { + max-width: 100%; + height: auto; + width: auto\9; /* ie8 */ +} \ No newline at end of file diff --git a/paper/iclc2023.html b/paper/iclc2023.html new file mode 100644 index 00000000..4092a0b4 --- /dev/null +++ b/paper/iclc2023.html @@ -0,0 +1,376 @@ + + + + + + + Sardine: a Modular Python Live Coding Environment + + + + + + + +

Abstract

+
+

Sardine is a live coding environment and library for Python 3.10+ +focusing on the modularity and extensibility of its base components +(clocks, parser, handlers). Sardine has been designed to be +easily integrated with existing live-coding environments as +both a tool for experimentation and demonstration of various live coding +techniques : temporal recursion, patterning, integration in various +hardware and software setups. Although the tool is still in active +development, it has already been used in multiple performances and +algoraves. This paper is dedicated to the introduction of the +Sardine system and the explanation of the main +guidelines currently followed by contributors to the project. It will +also present the preliminary results of our work through practical +realizations that served as objectives and experimental validations for +the first stages of development.

+
+ +

1 Introduction

+

The Python language, with its large collection of libraries and +modules, makes for a suitable environment for implementing a live coding +library. The popularity of Python ensures good user support and many +options for customization and integration into different text editors +and running environments. The possibility to enjoy a powerful ecosystem +of libraries focused on input/output, network communication or data +processing makes it a prime target for the implementation of a library +focused on experimentation in digital interactive media. Moreover, +thanks to its lightweight syntax, Python can be read by programmers +coming from different domains with a minimal adaptation time, making it +a great platform for sharing information, collaborating and +demonstrating the implementation of a given feature.

+

Sardine is born out of a curiosity for similar +Python-based live-coding libraries such as FoxDot, Isobar or the more recent +TidalVortex. It is +also best described as an attempt to re-create and bring the concept of +temporal recursion and TidalCycles patterning to Python.

+

2 Methodology and objectives: a +framework for exploring live-coding in Python

+

3 Sardine implementation

+

Sardine is implemented and distributed as two Python +modules: Fishery and Sardine.

+

3.1 Clock and Scheduling +System

+

3.2 Sardine Pattern Language

+

A small patterning language has been developed for +Sardine using the Lark parsing toolkit. +Defined as a LALR parser, the syntax of the language is best described +as a list-based calculator capable of dealing with basic MIDI note +definition, custom chance operators and other composition tools.

+

3.3 Players and Handlers

+

Description of the event based system. How to define an handler, what +is an hadler, etc…

+

Demo of the SuperDirt handler, etc…

+

4 Sardine usage

+

Basic facts about the usage of Sardine in various text editing +environments + how to install and handle a Sardine installation.

+

4.1 Algorave and performance

+

Zorba, Lorient, example code taken from performances.

+

4.2 Controlling Legacy MIDI +Synthesizers

+

Rémi Georges usage of Sardine: controlling legacy synthesizers along +with TidalCycles, etc…

+

4.3 Usage of Sardine at the IIL +Laboratory

+

Projects involving the Magnetic Resonator Piano, Boids, etc…

+

5 Project directions

+

5.1 Packaging and distribution

+

Distribution and release for Python 3.11 with updated C++ +dependencies whenever possible. Distribution on Pypi when it’ll be bug +free, etc…

+

5.2 Opening up for +collaboration

+

Documenting, section about the website and integration of the +Sardinopedia.

+

5.3 Creation and performance

+

6 Conclusion

+

Call for contributors, etc…

+

7 Acknowledgments

+

I warmly thank my thesis supervisors Laurent Pottier and Alain +Bonardi for their support and advice in the creation of this tool. I +thank the doctoral school 3LA from the University of Lyon for +the funding it provided to this research. I extend my thanks to the +musicians and friends who allowed me to take Sardine on +stage and to present it to a wider audience these few last months: the +Cookie Collective, Rémi Georges, etc…

+

Welcome to the markdown (aka commonmark) template for the +International Conference on Live Coding 2023.

+

This document is a guide to using markdown for the conference, and is +itself written in markdown. For full understanding, refer to +iclc2023.txt to see the source of this document, and iclc2023.pdf to see +the typeset output. Use of this template is currently only recommended +for those familiar with commandline tools.

+

We suggest you take a copy of this template (iclc2023.txt), and use +it as a starting point for your ICLC paper.

+

Preparing your submission using markdown will enable us to make +proceedings available both in PDF files suitable for print, and in HTML +suitable for the web. This is useful for making sure your paper is fully +accessible, via Internet search, and with assistive technology such as +screen readers for blind people. We recommend taking a straightforward +approach to formatting your document.

+

If you do not wish to use markdown, please do not be discouraged from +submitting your paper. There is also a word document template available +from the conference website.

+

8 Learning and using markdown

+

We are happy to answer any questions you have about markdown in +connection with your conference submission.

+

If you have questions, you can email us directly: iclc@creativecodingutrecht.nl

+

8.1 Running pandoc

+

Pandoc is software which turns text written in markdown into a +beautiful looking document, complete with references. You will need to +run it to create PDF documents of your paper for checking and uploading +for peer review.

+

You may download pandoc for all major operating systems (including MS +Windows, Apple Mac OS and GNU/Linux) from the following website: http://pandoc.org

+

As an alternative to the above downloads, on OS X only, the homebrew +package manager can be used to install pandoc: http://brew.sh/

+

If you use homebrew to install on OS X you will need to install the +pandoc package as follows:

+
brew update
+brew install pandoc
+

To produce PDF files you will need to have LaTeX installed, as well +as pandoc. See the pandoc website for installation instructions: http://pandoc.org/installing.html. +LaTeX is used internally, you will not have to edit any LaTeX +documents.

+

To render your markdown source as HTML, open a terminal window, +change into the folder where the template is and run the following +command:

+
pandoc --template=pandoc/iclc.html --citeproc --number-sections iclc2023.md -o iclc2023.html
+

To produce a PDF document, make sure you have LaTeX installed (see +above), and run the following:

+
pandoc --template=pandoc/iclc.latex --citeproc --number-sections iclc2023.md -o iclc2023.pdf
+

For a higher quality output, add the option +--latex-engine=xelatex to the above. You will need the Inconsolata +and Linux +Libertine opentype fonts installed.

+

An example Makefile is also provided to run these commands for you. +If you have make installed, you can use it to build the pdf +files.

+

8.2 Bibliographic references

+

Pandoc accepts bibliographic databases in a range of formats, so make +sure you have the right extension on your file.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Supported bibliography formats with file extension.
FormatFile extension
MODS.mods
BibLaTeX.bib
BibTeX.bibtex
RIS.ris
EndNote.enl
EndNote XML.xml
ISI.wos
MEDLINE.medline
Copac.copac
JSON citeproc.json
+

Authors may be referenced in two ways; inline, e.g. Schwitters (1932) +wrote the Ursonate sound poem, or in parenthesis, e.g. Ursonate is a +sound poem (Schwitters +1932). Multiple references should be grouped together like so +(Schwitters 1932; Miller +1956; Greenewalt 1946).

+

The pandoc command given in the above section +will automatically render your references according to Chicago +author-date style.

+

At the head of the markdown source file for this template, you will +see an entry for “bibliography” that points to the file references.bib. +Here you’ll find examples of bibliography entries in BibLaTex format, +including examples for articles, books, book chapters and items from +conference proceedings.

+

8.3 Code

+

We have chosen a single column layout to better support code examples +without having to break lines. The following shows how to include a code +example with syntax highlighting:

+
d1 $ every 3 (iter 4) $ brak $ "bd [sn [[sn bd] sn]]*1/3"
+

For more information please visit this page: +[https://hackage.haskell.org/package/skylighting]

+

8.4 Figures

+

Images should be included as figures, with captions provided and +formatted as shown in Figure 1. Be prepared for the page layout and +image size to be changed during the editing and layout process, and +consider this when referring to the figures in the text.

+
+ + +
+

9 Conclusion

+

We look forward to receiving your completed papers. Submission is +through the online peer review system at +https://iclc2023.creativecodingutrecht.nl/openconf.php only. Do not send +papers directly by e-mail. In all cases, please submit a PDF version of +your paper for peer review. At a later stage in preparing the +proceedings, we may ask for the markdown or Word versions of your +paper.

+

9.1 Acknowledgments

+

At the end of the Conclusions, acknowledgements to people, projects, +funding agencies, etc. can be included after the second-level heading +“Acknowledgments”.

+

References

+
+
+Greenewalt, Mary H. 1946. Nourathar, the Fine Art of Light Color +Playing. Philadelphia. Pa. Westbrook. +
+
+Miller, G. A. 1956. “The Magical Number Seven Plus or Minus Two: +Some Limits on Our Capacity for Processing Information.” +Psychological Review 63 (2): 81–97. +
+
+Schwitters, Kurt. 1932. “Ursonate.” Merz 24. +
+
+ + diff --git a/paper/iclc2023.md b/paper/iclc2023.md new file mode 100755 index 00000000..83a1d5ef --- /dev/null +++ b/paper/iclc2023.md @@ -0,0 +1,225 @@ +--- +# This template is licensed under a Creative Commons 0 1.0 Universal License (CC0 1.0). Public Domain Dedication. + +title: 'Sardine: a Modular Python Live Coding Environment' +author: + - name: Raphaël Forment + affiliation: Université Jean Monnet (Saint Étienne, ECLLA) + email: raphael.forment@gmail.com +abstract: | + Sardine is a live coding environment and library for Python 3.10+ focusing on the modularity and extensibility of its base components (clocks, parser, *handlers*). Sardine has been designed to be easily integrated with existing *live-coding* environments as both a tool for experimentation and demonstration of various live coding techniques : temporal recursion, patterning, integration in various hardware and software setups. Although the tool is still in active development, it has already been used in multiple performances and algoraves. This paper is dedicated to the introduction of the **Sardine** system and the explanation of the main guidelines currently followed by contributors to the project. It will also present the preliminary results of our work through practical realizations that served as objectives and experimental validations for the first stages of development. +fontsize: 11pt +geometry: margin=2cm +fontfamily: libertine +fontfamily: inconsolata +mainfont: Linux Libertine O +monofont: Inconsolata +bibliography: references.bib +... + +# Introduction + +The Python language, with its large collection of libraries and modules, makes for a suitable environment for implementing a live coding library. The popularity of Python ensures good user support and many options for customization and integration into different text editors and running environments. 
The possibility to enjoy a powerful ecosystem of libraries focused on input/output, network communication or data processing makes it a prime target for the implementation of a library focused on experimentation in digital interactive media. Moreover, thanks to its lightweight syntax, Python can be read by programmers coming from different domains with a minimal adaptation time, making it a great platform for sharing information, collaborating and demonstrating the implementation of a given feature. + +**Sardine** is born out of a curiosity for similar Python-based live-coding libraries such as [FoxDot](https://foxdot.org), [Isobar](https://github.com/ideoforms/isobar) or the more recent [TidalVortex](https://github.com/tidalcycles/vortex). It is also best described as an attempt to re-create and bring the concept of temporal recursion and TidalCycles patterning to Python. + +# Methodology and objectives: a framework for exploring live-coding in Python + +# Sardine implementation + +**Sardine** is implemented and distributed as two Python modules: **Fishery** and **Sardine**. + +## Clock and Scheduling System + +## Sardine Pattern Language + +A small patterning language has been developed for **Sardine** using the [Lark](https://github.com/lark-parser/lark) parsing toolkit. Defined as a LALR parser, the syntax of the language is best described as a list-based calculator capable of dealing with basic MIDI note definition, custom chance operators and other composition tools. + +## Players and Handlers + +Description of the event-based system. How to define a handler, what is a handler, etc... + +Demo of the **SuperDirt** handler, etc... + +# Sardine usage + +Basic facts about the usage of Sardine in various text editing environments + how to install and handle a Sardine installation. + +## Algorave and performance + +Zorba, Lorient, example code taken from performances. 
+ +## Controlling Legacy MIDI Synthesizers + +Rémi Georges usage of Sardine: controlling legacy synthesizers along with TidalCycles, etc... + +## Usage of Sardine at the IIL Laboratory + +Projects involving the Magnetic Resonator Piano, Boids, etc... + +# Project directions + +## Packaging and distribution + +Distribution and release for Python 3.11 with updated C++ dependencies whenever possible. Distribution on Pypi when it'll be bug free, etc... + +## Opening up for collaboration + +Documenting, section about the website and integration of the Sardinopedia. + +## Creation and performance + +# Conclusion + +Call for contributors, etc... + +# Acknowledgments + +I warmly thank my thesis supervisors Laurent Pottier and Alain Bonardi for their support and advice in the creation of this tool. I thank the doctoral school *3LA* from the University of Lyon for the funding it provided to this research. I extend my thanks to the musicians and friends who allowed me to take **Sardine** on stage and to present it to a wider audience these few last months: the **Cookie Collective**, Rémi Georges, etc... + +Welcome to the markdown (aka commonmark) template for the +International Conference on Live Coding 2023. + +This document is a guide to using markdown for the conference, and is +itself written in markdown. For full understanding, refer to +iclc2023.txt to see the source of this document, and iclc2023.pdf to +see the typeset output. Use of this template is currently only +recommended for those familiar with commandline tools. + +We suggest you take a copy of this template (iclc2023.txt), and use it +as a starting point for your ICLC paper. + +Preparing your submission using markdown will enable us to make proceedings +available both in PDF files suitable for print, and in HTML suitable +for the web. This is useful for making sure your paper is fully +accessible, via Internet search, and with assistive technology such as +screen readers for blind people. 
We recommend taking a straightforward approach to +formatting your document. + +If you do not wish to use markdown, please do not be discouraged from +submitting your paper. There is also a word document template +available from the conference website. + +# Learning and using markdown + +We are happy to answer any questions you have about markdown in connection with your +conference submission. + +If you have questions, you can email us directly: + [iclc@creativecodingutrecht.nl](mailto:iclc@creativecodingutrecht.nl) + +## Running pandoc {#pandoc} + +Pandoc is software which turns text written in markdown into a +beautiful looking document, complete with references. You will need to +run it to create PDF documents of your paper for checking and +uploading for peer review. + +You may download pandoc for all major operating systems (including MS +Windows, Apple Mac OS and GNU/Linux) from the following website: +[http://pandoc.org](http://pandoc.org) + +As an alternative to the above downloads, on OS X only, the homebrew +package manager can be used to install pandoc: [http://brew.sh/](http://brew.sh/) + +If you use homebrew to install on OS X you will need to install the pandoc package as follows: +``` +brew update +brew install pandoc +``` + +To produce PDF files you will need to have LaTeX installed, as well as +pandoc. See the pandoc website for installation instructions: +[http://pandoc.org/installing.html](http://pandoc.org/installing.html). LaTeX +is used internally, you will not have to edit any LaTeX documents. 
+ +To render your markdown source as HTML, open a terminal window, change +into the folder where the template is and run the following command: + +~~~~ {.bash} +pandoc --template=pandoc/iclc.html --citeproc --number-sections iclc2023.md -o iclc2023.html +~~~~ + +To produce a PDF document, make sure you have LaTeX installed (see +above), and run the following: + +~~~~ {.bash} +pandoc --template=pandoc/iclc.latex --citeproc --number-sections iclc2023.md -o iclc2023.pdf +~~~~ + +For a higher quality output, add the option `--latex-engine=xelatex` +to the above. You will need the [Inconsolata](http://levien.com/type/myfonts/inconsolata.html) and [Linux Libertine](http://www.linuxlibertine.org/index.php?id=91&L=1) opentype fonts installed. + +An example Makefile is also provided to run these commands for you. If you have *make* installed, you can use it to build the pdf files. + +## Bibliographic references + +Pandoc accepts bibliographic databases in a range of formats, so make +sure you have the right extension on your file. + + Format File extension + ------------ -------------- + MODS .mods + BibLaTeX .bib + BibTeX .bibtex + RIS .ris + EndNote .enl + EndNote XML .xml + ISI .wos + MEDLINE .medline + Copac .copac + JSON citeproc .json + +Table: Supported bibliography formats with file extension. + +Authors may be referenced in two ways; inline, e.g. @Schwitters32 +wrote the Ursonate sound poem, or in parenthesis, e.g. Ursonate is a +sound poem [@Schwitters32]. Multiple references should be grouped +together like so [@Schwitters32;@Miller56;@Greenewalt46]. + +The pandoc command given in the [above section](#pandoc) will automatically +render your references according to Chicago author-date style. + +At the head of the markdown source file for this template, you will see an entry +for "bibliography" that points to the file references.bib. 
Here you'll find +examples of bibliography entries in BibLaTex format, including examples for +articles, books, book chapters and items from conference proceedings. + +## Code + +We have chosen a single column layout to better support code examples +without having to break lines. The following shows how to include a +code example with syntax highlighting: + +~~~~ {.haskell} +d1 $ every 3 (iter 4) $ brak $ "bd [sn [[sn bd] sn]]*1/3" +~~~~ + +For more information please visit this page: +[https://hackage.haskell.org/package/skylighting] + +## Figures + +Images should be included as figures, with captions provided and +formatted as shown in Figure 1. Be prepared for the page layout and +image size to be changed during the editing and layout process, and +consider this when referring to the figures in the text. + +![*A descriptive caption should be given for all figures, understandable without reference to the rest of the article.*](images/pomeroy.jpg) + +# Conclusion + +We look forward to receiving your completed papers. Submission is through the +online peer review system at https://iclc2023.creativecodingutrecht.nl/openconf.php only. +Do not send papers directly by e-mail. In all cases, please submit a PDF version of +your paper for peer review. At a later stage in preparing the proceedings, we may ask +for the markdown or Word versions of your paper. + +## Acknowledgments + + +At the end of the Conclusions, acknowledgements to people, projects, funding +agencies, etc. can be included after the second-level heading “Acknowledgments”. 
+ + +# References diff --git a/paper/iclc2023.pdf b/paper/iclc2023.pdf new file mode 100644 index 00000000..000f16e5 Binary files /dev/null and b/paper/iclc2023.pdf differ diff --git a/paper/images/pomeroy.jpg b/paper/images/pomeroy.jpg new file mode 100755 index 00000000..2b99dfba Binary files /dev/null and b/paper/images/pomeroy.jpg differ diff --git a/paper/inconsolata.sty b/paper/inconsolata.sty new file mode 100644 index 00000000..80011e94 --- /dev/null +++ b/paper/inconsolata.sty @@ -0,0 +1,92 @@ +% Copyright 2014 Michael Sharpe +% Based initially on Karl Berry's inconsolata.sty. +% You may freely use, modify and/or distribute this file. + +\def\fileversion{1.05} +\def\filedate{2014/06/22} +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{inconsolata}[\filedate\space v\fileversion] +\message{`inconsolata-zi4' v\fileversion, \filedate\space Text macros for Inconsolata (msharpe)} + +\RequirePackage{textcomp} +\RequirePackage{keyval} + +\newcount\zifour@ocount +\newif\ifzifour@altzero +\newif\ifzifour@noupq +\define@key{zifour}{scaled}[1.0]{\def\zifour@scaled{s*[#1]}} + +\DeclareOption*{% + \begingroup + \edef\x{\endgroup + \noexpand\setkeys{zifour}{\CurrentOption}}% + \x} + +% by default, change \tt to mean zi4. +\newcommand*{\zifour@default}{% + \renewcommand*{\ttdefault}{zi4}% +} + +% option [nott] to avoid changing tt. +\DeclareOption{nott}{% + \renewcommand*{\zifour@default}{}% +} +% option [noupquote] to prevent loading upquote. 
+\DeclareOption{noupquote}{% + \zifour@noupqtrue}% + +% option var0---use unslashed zero (slashed is default) +\DeclareOption{var0}{% + \zifour@altzerotrue\advance\zifour@ocount \tw@ % +} +\DeclareOption{varl}{% + \advance\zifour@ocount \@ne % +} +\DeclareOption{varqu}{% + \advance\zifour@ocount 4\relax % +} + +\ProcessOptions* +\zifour@default +\edef\zifour@opt{\the\zifour@ocount} +\ifzifour@altzero + \advance\zifour@ocount -\tw@ +\else + \advance\zifour@ocount \tw@ +\fi +\edef\zifour@altopt{\the\zifour@ocount} +% define an \altzero macro which flips to slashed, unslashed +\def\altzero{{\fontfamily{zi4}% + \fontshape{scit}% + \selectfont 0}} + +\def\zifour@T@ne@nc{T1} +\def\zifour@OT@ne@nc{OT1} +\def\zifour@LY@ne@nc{LY1} +\def\zifour@QX@nc{QX} +\def\zifour@TQS{% +\UndeclareTextCommand{\textquotesingle}{\encodingdefault} +\DeclareTextSymbol{\textquotesingle}{TS1}{39}} + +\ifzifour@noupq% do nothing + % Try to correct for wrong slots for QX + \ifx\encodingdefault\zifour@QX@nc + \zifour@TQS + \else + \ifx\encodingdefault\zifour@LY@ne@nc + \zifour@TQS + \fi + \fi +\else + \AtBeginDocument{% + \ifx\encodingdefault\zifour@T@ne@nc % do nothing + \else + \ifx\encodingdefault\zifour@OT@ne@nc % do nothing + \else + \zifour@TQS + \fi + \fi + \usepackage{upquote}} +\fi + +\endinput diff --git a/paper/pandoc/iclc.html b/paper/pandoc/iclc.html new file mode 100755 index 00000000..90406b16 --- /dev/null +++ b/paper/pandoc/iclc.html @@ -0,0 +1,80 @@ +$if(false)$ + +This is a pandoc template and should not be edited. + +$endif$ + + + + + + +$for(author-meta)$ + +$endfor$ +$if(date-meta)$ + +$endif$ + $if(title-prefix)$$title-prefix$ - $endif$$pagetitle$ + +$if(quotes)$ + +$endif$ +$if(highlighting-css)$ + +$endif$ + +$for(css)$ + +$endfor$ +$if(math)$ + $math$ +$endif$ +$for(header-includes)$ + $header-includes$ +$endfor$ + + +$for(include-before)$ +$include-before$ +$endfor$ +$if(title)$ +
+

$title$

+$if(subtitle)$ +

$subtitle$

+$endif$ + +$if(date)$ +

$date$

+$endif$ +
+$endif$ +$if(toc)$ +
+$toc$ +
+$endif$ + +

Abstract

+
+$if(abstract)$ +$abstract$ +$else$ +Please provide an abstract in the metadata block at the top of your +markdown document. Refer to template.txt for details. +$endif$ +
+ +$body$ +$for(include-after)$ +$include-after$ +$endfor$ + + diff --git a/paper/pandoc/iclc.latex b/paper/pandoc/iclc.latex new file mode 100755 index 00000000..75fe523a --- /dev/null +++ b/paper/pandoc/iclc.latex @@ -0,0 +1,224 @@ +$if(false)$ + +This is a pandoc template and should not be edited. + +$endif$ +\documentclass[$if(fontsize)$$fontsize$,$endif$$if(lang)$$lang$,$endif$$if(papersize)$$papersize$,$endif$$for(classoption)$$classoption$$sep$,$endfor$]{$documentclass$} + +\usepackage{pandoc/iclc} +$if(linestretch)$ +\usepackage{setspace} +\setstretch{$linestretch$} +$endif$ + +\usepackage{amssymb,amsmath} +\usepackage{ifxetex,ifluatex} +\usepackage{fixltx2e} % provides \textsubscript +\ifnum 0\ifxetex 1\fi\ifluatex 1\fi=0 % if pdftex + $if(fontfamily)$ + \usepackage{$fontfamily$} + \usepackage{inconsolata} + $else$ + \usepackage{lmodern} + $endif$ + \usepackage[T1]{fontenc} + \usepackage[utf8]{inputenc} +$if(euro)$ + \usepackage{eurosym} +$endif$ +\else % if luatex or xelatex + \ifxetex + \usepackage{mathspec} + \usepackage{xltxtra,xunicode} + \else + \usepackage{fontspec} + \fi + \defaultfontfeatures{Mapping=tex-text,Scale=MatchLowercase} + \newcommand{\euro}{€} +$if(mainfont)$ + \setmainfont{$mainfont$} +$endif$ +$if(sansfont)$ + \setsansfont{$sansfont$} +$endif$ +$if(monofont)$ + \setmonofont[Mapping=tex-ansi]{$monofont$} +$endif$ +$if(mathfont)$ + \setmathfont(Digits,Latin,Greek){$mathfont$} +$endif$ +\fi +% use upquote if available, for straight quotes in verbatim environments +\IfFileExists{upquote.sty}{\usepackage{upquote}}{} +% use microtype if available +\IfFileExists{microtype.sty}{% +\usepackage{microtype} +\UseMicrotypeSet[protrusion]{basicmath} % disable protrusion for tt fonts +}{} +$if(geometry)$ +\usepackage[$for(geometry)$$geometry$$sep$,$endfor$]{geometry} +$endif$ +$if(lang)$ +\ifxetex + \usepackage{polyglossia} + \setmainlanguage{$mainlang$} +\else + \usepackage[shorthands=off,$lang$]{babel} +\fi +$endif$ +$if(natbib)$ 
+\usepackage{natbib} +\bibliographystyle{$if(biblio-style)$$biblio-style$$else$plainnat$endif$} +$endif$ +$if(biblatex)$ +\usepackage{biblatex} +$if(biblio-files)$ +\bibliography{$biblio-files$} +$endif$ +$endif$ +$if(listings)$ +\usepackage{listings} +$endif$ +$if(lhs)$ +\lstnewenvironment{code}{\lstset{language=Haskell,basicstyle=\small\ttfamily}}{} +$endif$ +$if(highlighting-macros)$ +$highlighting-macros$ +$endif$ +$if(verbatim-in-note)$ +\usepackage{fancyvrb} +\VerbatimFootnotes +$endif$ +$if(tables)$ +\usepackage{longtable,booktabs} +$endif$ +$if(csl-refs)$ +\newlength{\cslhangindent} +\setlength{\cslhangindent}{1.5em} +\newenvironment{CSLReferences}% + {$if(csl-hanging-indent)$\setlength{\parindent}{0pt}% + \everypar{\setlength{\hangindent}{\cslhangindent}}\ignorespaces$endif$}% + {\par} +$endif$ +$if(graphics)$ +\usepackage{graphicx} +\makeatletter +\def\maxwidth{\ifdim\Gin@nat@width>\linewidth\linewidth\else\Gin@nat@width\fi} +\def\maxheight{\ifdim\Gin@nat@height>\textheight\textheight\else\Gin@nat@height\fi} +\makeatother +% Scale images if necessary, so that they will not overflow the page +% margins by default, and it is still possible to overwrite the defaults +% using explicit options in \includegraphics[width, height, ...]{} +\setkeys{Gin}{width=\maxwidth,height=\maxheight,keepaspectratio} +$endif$ +\ifxetex + \usepackage[setpagesize=false, % page size defined by xetex + unicode=false, % unicode breaks when used with xetex + xetex]{hyperref} +\else + \usepackage[unicode=true]{hyperref} +\fi +\hypersetup{breaklinks=true, + bookmarks=true, + pdfauthor={$author-meta$}, + pdftitle={$title-meta$}, + colorlinks=true, + citecolor=$if(citecolor)$$citecolor$$else$blue$endif$, + urlcolor=$if(urlcolor)$$urlcolor$$else$blue$endif$, + linkcolor=$if(linkcolor)$$linkcolor$$else$magenta$endif$, + pdfborder={0 0 0}} +\urlstyle{same} % don't use monospace font for urls +$if(links-as-notes)$ +% Make links footnotes instead of hotlinks: 
+\renewcommand{\href}[2]{#2\footnote{\url{#1}}} +$endif$ +$if(strikeout)$ +\usepackage[normalem]{ulem} +% avoid problems with \sout in headers with hyperref: +\pdfstringdefDisableCommands{\renewcommand{\sout}{}} +$endif$ +\setlength{\parindent}{0pt} +\setlength{\parskip}{6pt plus 2pt minus 1pt} +\setlength{\emergencystretch}{3em} % prevent overfull lines +\providecommand{\tightlist}{%/ + \setlength{\itemsep}{0pt}\setlength{\parskip}{0pt}} +$if(numbersections)$ +\setcounter{secnumdepth}{5} +$else$ +\setcounter{secnumdepth}{0} +$endif$ +$if(verbatim-in-note)$ +\VerbatimFootnotes % allows verbatim text in footnotes +$endif$ + +$if(title)$ +\title{$title$$if(subtitle)$\\\vspace{0.5em}{\large $subtitle$}$endif$} +$endif$ +$if(author)$ +\author{ +$for(author)$ + $author.name$ \\ + $author.affiliation$\\ + \href{mailto:$author.email$}{$author.email$} + $sep$ \and +$endfor$ +} +$endif$ +\date{$date$} +$for(header-includes)$ +$header-includes$ +$endfor$ + +\begin{document} +$if(title)$ +\maketitle +$endif$ +\begin{abstract} +$if(abstract)$ +$abstract$ +$else$ +Please provide an abstract in the metadata block at the top of the +markdown document. Refer to template.txt for details. 
$endif$ +\end{abstract} + +$for(include-before)$ +$include-before$ + +$endfor$ +$if(toc)$ +{ +\hypersetup{linkcolor=black} +\setcounter{tocdepth}{$toc-depth$} +\tableofcontents +} +$endif$ +$if(lot)$ +\listoftables +$endif$ +$if(lof)$ +\listoffigures +$endif$ +$body$ + +$if(natbib)$ +$if(biblio-files)$ +$if(biblio-title)$ +$if(book-class)$ +\renewcommand\bibname{$biblio-title$} +$else$ +\renewcommand\refname{$biblio-title$} +$endif$ +$endif$ +\bibliography{$biblio-files$} + +$endif$ +$endif$ +$if(biblatex)$ +\printbibliography$if(biblio-title)$[title=$biblio-title$]$endif$ + +$endif$ +$for(include-after)$ +$include-after$ + +$endfor$ +\end{document} diff --git a/paper/pandoc/iclc.sty b/paper/pandoc/iclc.sty new file mode 100755 index 00000000..b90f8ace --- /dev/null +++ b/paper/pandoc/iclc.sty @@ -0,0 +1,54 @@ + +\def\Hline{\noalign{\hrule height 0.4mm}} +%\newcommand{\bm}[1]{\mbox{\boldmath{$#1$}}} +\newcommand{\figbox}[1]{\fbox{\parbox{\columnwidth}{\centering{ #1 }}}} +\newcommand{\range}[2]{{#1,\cdots,#2\;}} +\newcommand{\secref}[1]{\mbox{Section~\ref{#1}}} +\newcommand{\tabref}[1]{\mbox{Table~\ref{#1}}} +\newcommand{\figref}[1]{\mbox{Figure~\ref{#1}}} +\newcommand{\eqnref}[1]{\mbox{Eq.~(\ref{#1})}} + +\renewcommand{\sfdefault}{phv} +\renewcommand{\rmdefault}{ptm} +\renewcommand{\ttdefault}{pcr} + +\setlength{\paperheight}{297mm} +\setlength{\paperwidth}{210mm} +\setlength{\textheight}{252mm} +\setlength{\textwidth}{172mm} +\setlength{\columnsep}{8mm} +\setlength{\headheight}{0mm} +\setlength{\voffset}{-12mm} +\setlength{\hoffset}{0mm} +\setlength{\marginparwidth}{0mm} +\setlength{\parindent}{2mm} %1pc +\setlength{\topmargin}{-5mm} +\setlength{\oddsidemargin}{-6mm} +\setlength{\evensidemargin}{-6mm} + +\setlength\normallineskip{1\p@} +\setlength\parskip{0\p@ \@plus \p@} +%\def\baselinestretch{0.98} + +\def\normalsize{\@setsize\normalsize{12pt}\xpt\@xpt} +\def\small{\@setsize\small{10pt}\ixpt\@ixpt} +\def\footnotesize{\@setsize\footnotesize{8pt}\viiipt\@viiipt} 
+\def\scriptsize{\@setsize\scriptsize{8pt}\viipt\@viipt} +\def\tiny{\@setsize\tiny{7pt}\vipt\@vipt} +\def\large{\@setsize\large{14pt}\xiipt\@xiipt} +\def\Large{\@setsize\Large{16pt}\xivpt\@xivpt} +\def\LARGE{\@setsize\LARGE{20pt}\xviipt\@xviipt} +\def\huge{\@setsize\huge{23pt}\xxpt\@xxpt} +\def\Huge{\@setsize\Huge{28pt}\xxvpt\@xxvpt} + +\pagestyle{empty} + +\def\abstract{ + \begin{center}{ + \bf ABSTRACT + } + \end{center} +} +\def\endabstract{\par} + +\flushbottom diff --git a/paper/references.bib b/paper/references.bib new file mode 100644 index 00000000..1d8239cc --- /dev/null +++ b/paper/references.bib @@ -0,0 +1,58 @@ +@book{Greenewalt46, + author = {Greenewalt, Mary H.}, + publisher = {Philadelphia. Pa. Westbrook}, + title = {Nourathar, the Fine Art of Light Color Playing}, + year = {1946} +} + +@article{Miller56, + author = {Miller, G. A.}, + issn = {0033-295X}, + journal = {Psychological review}, + number = {2}, + pages = {81--97}, + title = {The magical number seven plus or minus two: some limits on our capacity for processing information.}, + volume = {63}, + year = {1956} +} + +@article{Schwitters32, + author = {Schwitters, Kurt}, + journal = {Merz}, + publisher = {Merz}, + title = {Ursonate}, + volume = {24}, + year = {1932} +} + +@inbook{forABookChapter, + author = {LastName, FirstName and Last, First}, + year= {2013}, + title = {insert title of book chapter here}, + booktitle = {insert title of book here}, + pages = {162--165}, + editor = {Lastname, Firstname}, + publisher = {Big Important Publishing Company}, + address = {Atlantis, Underwater Republic} +} + +@proceedings{nime2014, + title = {Proceedings of the International Conference on New Interfaces for Musical Expression 2017}, + shorttitle = {NIME-2017}, + eventdate = {2017-05-15/2017-05-20}, + venue = {Copenhagen, Denmark}, + date = {2014} +} + +@inproceedings{SomethingFromNime2016, + author = {Lastname, firstname and lastname, firstname}, + year = {2016}, + title = {Title of specific item 
here}, + crossref = {nime2016}, + pages = {190--197} +} + +@misc {forSomethingDifferent, + author={LastNameForCitation, FirstNameForCitation}, + url={http://nowhere.nowhere} +} diff --git "a/paper/\302\262" "b/paper/\302\262" new file mode 100644 index 00000000..e9a8dba7 --- /dev/null +++ "b/paper/\302\262" @@ -0,0 +1,187 @@ +--- +# This template is licensed under a Creative Commons 0 1.0 Universal License (CC0 1.0). Public Domain Dedication. + +title: 'Sardine: a Modular Python Live Coding Environment' +author: + - name: Raphaël Forment + affiliation: Université Jean Monnet (Saint Étienne, ECLLA) + email: raphael.forment@gmail.com +abstract: | + Sardine is a live coding environment and library for Python 3.10+ focusing on the modularity and extensibility of its base components (clocks, parser, *handlers*). Sardine has been designed to be easily integrated with existing *live-coding* environments as both a tool for experimentation and demonstration of various live coding techniques : temporal recursion, patterning, integration in various hardware and software setups. Although the tool is still in active development, it has already been used in multiple performances and algoraves. This paper is dedicated to the introduction of the **Sardine** system and the explanation of the main guidelines that currently guide the development work. It will also present the preliminary results of our work through practical realizations that served as objectives and experimental validations for the first stages of development. +fontsize: 11pt +geometry: margin=2cm +fontfamily: libertine +fontfamily: inconsolata +mainfont: Linux Libertine O +monofont: Inconsolata +bibliography: references.bib +... + +# Introduction + +The Python language, with its large collection of libraries and modules, makes for a suitable environment for implementing a live coding library. 
The popularity of Python ensures good user support and many options for customization and integration into different text editors and running environments. The possibility to enjoy a powerful ecosystem of libraries focused on input/output, network communication or data processing makes it a prime target for the implementation of a library focused on experimentation in digital interactive media. Moreover, thanks to its lightweight syntax, Python can be read by programmers coming from different domains with a minimal adaptation time, making it a great platform for sharing information, collaborating and demonstrating the implementation of a given feature. + +**Sardine** is born out of a curiosity for similar Python-based live-coding libraries such as [FoxDot](https://foxdot.org), [Isobar](https://github.com/ideoforms/isobar) or the more recent [TidalVortex](https://github.com/tidalcycles/vortex). It is also best described as an attempt to re-create and bring the concept of temporal recursion and TidalCycles patterning to Python. + +# Methodology and objectives + +# Sardine implementation + +## Clock and Scheduling System + +# Sardine usage + +## Algorave and performance + +## Controlling Legacy MIDI Synthesizers + +## Usage of Sardine at the IIL Laboratory + +Welcome to the markdown (aka commonmark) template for the +International Conference on Live Coding 2023. + +This document is a guide to using markdown for the conference, and is +itself written in markdown. For full understanding, refer to +iclc2023.txt to see the source of this document, and iclc2023.pdf to +see the typeset output. Use of this template is currently only +recommended for those familiar with commandline tools. + +We suggest you take a copy of this template (iclc2023.txt), and use it +as a starting point for your ICLC paper. + +Preparing your submission using markdown will enable us to make proceedings +available both in PDF files suitable for print, and in HTML suitable +for the web. 
This is useful for making sure your paper is fully +accessible, via Internet search, and with assistive technology such as +screen readers for blind people. We recommend taking a straightforward approach to +formatting your document. + +If you do not wish to use markdown, please do not be discouraged from +submitting your paper. There is also a word document template +available from the conference website. + +# Learning and using markdown + +We are happy to answer any questions you have about markdown in connection with your +conference submission. + +If you have questions, you can email us directly: + [iclc@creativecodingutrecht.nl](mailto:iclc@creativecodingutrecht.nl) + +## Running pandoc {#pandoc} + +Pandoc is software which turns text written in markdown into a +beautiful looking document, complete with references. You will need to +run it to create PDF documents of your paper for checking and +uploading for peer review. + +You may download pandoc for all major operating systems (including MS +Windows, Apple Mac OS and GNU/Linux) from the following website: +[http://pandoc.org](http://pandoc.org) + +As an alternative to the above downloads, on OS X only, the homebrew +package manager can be used to install pandoc: [http://brew.sh/](http://brew.sh/) + +If you use homebrew to install on OS X you will need to install the pandoc package as follows: +``` +brew update +brew install pandoc +``` + +To produce PDF files you will need to have LaTeX installed, as well as +pandoc. See the pandoc website for installation instructions: +[http://pandoc.org/installing.html](http://pandoc.org/installing.html). LaTeX +is used internally, you will not have to edit any LaTeX documents. 
+ +To render your markdown source as HTML, open a terminal window, change +into the folder where the template is and run the following command: + +~~~~ {.bash} +pandoc --template=pandoc/iclc.html --citeproc --number-sections iclc2023.md -o iclc2023.html +~~~~ + +To produce a PDF document, make sure you have LaTeX installed (see +above), and run the following: + +~~~~ {.bash} +pandoc --template=pandoc/iclc.latex --citeproc --number-sections iclc2023.md -o iclc2023.pdf +~~~~ + +For a higher quality output, add the option `--latex-engine=xelatex` +to the above. You will need the [Inconsolata](http://levien.com/type/myfonts/inconsolata.html) and [Linux Libertine](http://www.linuxlibertine.org/index.php?id=91&L=1) opentype fonts installed. + +An example Makefile is also provided to run these commands for you. If you have *make* installed, you can use it to build the pdf files. + +## Bibliographic references + +Pandoc accepts bibliographic databases in a range of formats, so make +sure you have the right extension on your file. + + Format File extension + ------------ -------------- + MODS .mods + BibLaTeX .bib + BibTeX .bibtex + RIS .ris + EndNote .enl + EndNote XML .xml + ISI .wos + MEDLINE .medline + Copac .copac + JSON citeproc .json + +Table: Supported bibliography formats with file extension. + +Authors may be referenced in two ways; inline, e.g. @Schwitters32 +wrote the Ursonate sound poem, or in parenthesis, e.g. Ursonate is a +sound poem [@Schwitters32]. Multiple references should be grouped +together like so [@Schwitters32;@Miller56;@Greenewalt46]. + +The pandoc command given in the [above section](#pandoc) will automatically +render your references according to Chicago author-date style. + +At the head of the markdown source file for this template, you will see an entry +for "bibliography" that points to the file references.bib. 
Here you'll find +examples of bibliography entries in BibLaTex format, including examples for +articles, books, book chapters and items from conference proceedings. + +## Code + +We have chosen a single column layout to better support code examples +without having to break lines. The following shows how to include a +code example with syntax highlighting: + +~~~~ {.haskell} +d1 $ every 3 (iter 4) $ brak $ "bd [sn [[sn bd] sn]]*1/3" +~~~~ + +For more information please visit this page: +[https://hackage.haskell.org/package/skylighting] + +## Figures + +Images should be included as figures, with captions provided and +formatted as shown in Figure 1. Be prepared for the page layout and +image size to be changed during the editing and layout process, and +consider this when referring to the figures in the text. + +![*A descriptive caption should be given for all figures, understandable without reference to the rest of the article.*](images/pomeroy.jpg) + +# Conclusion + +We look forward to receiving your completed papers. Submission is through the +online peer review system at https://iclc2023.creativecodingutrecht.nl/openconf.php only. +Do not send papers directly by e-mail. In all cases, please submit a PDF version of +your paper for peer review. At a later stage in preparing the proceedings, we may ask +for the markdown or Word versions of your paper. + +## Acknowledgments + +I warmly thank my thesis supervisors Laurent Pottier and Alain Bonardi for their support and advice in the creation of this tool. I thank the doctoral school *3LA* from the University of Lyon for the funding it provided to this research. I extend my thanks to the musicians and friends who allowed me to take **Sardine** on stage and to present it to a wider audience these few last months: the **Cookie Collective**, Rémi Georges, etc... + + +At the end of the Conclusions, acknowledgements to people, projects, funding +agencies, etc. 
can be included after the second-level heading “Acknowledgments”. + + +# References diff --git a/pyproject.toml b/pyproject.toml index fffd6e4f..29779793 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,56 +1,95 @@ -[tool.poetry] +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] name = "sardine" -version = "0.1.0" +dynamic = ["version"] description = "Python's missing algorave module" -authors = ["Raphael Forment "] -license = "GPL-3.0" +authors = [ + {name = "Raphaël Forment", email="raphael.forment@gmail.com"}, + {email = "raphael.forment@gmail.com"} +] +license = {file = "LICENSE.txt"} readme = "README.md" +requires-python = ">=3.10" + +keywords = ["algorave", "live-coding", "music", "patterns"] + classifiers = [ - "Topic :: Software Development :: Build Tools", - "Topic :: Software Development :: Libraries :: Python Modules", - "Development Status :: 3 - Alpha", - "Intended Audience :: Developers, Musicians", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3 :: Only", ] -[tool.poetry.dependencies] -python = ">=3.9,<4.0.0" +# Requirements: This is done differently by poetry! 
+dependencies = [ + "Click>=8.1.3", + "LinkPython>=0.1", + "python-rtmidi>=1.4.9", + "inquirerpy>=0.3.4", + "easing-functions>=1.0.4", + "mido>=1.2", + "osc4py3>=1.0.8", + "uvloop>=0.16; sys_platform != 'win32'", + "appdirs>=1.4", + "psutil>=5.9", + "rich>=12.5", + "lark>=1.1", + "exceptiongroup>=1.0.4", +] -LinkPython = "^0.1" -python-rtmidi = [ - {version = "^1.4.9"} +[tool.hatch.version] +path = "sardine/__init__.py" + +[tool.isort] +profile = "black" +skip_gitignore = true + +[tool.setuptools] +py-modules = [] + +[project.optional-dependencies] +dev = [ + "black>=22.8", + "pylint>=2.15", + "pytest>= 7.2.0", + "pytest-asyncio>=0.20.2", +] +profile = [ + "yappi>=1.4.0", ] -inquirerpy = "^0.3.4" -easing-functions = "^1.0.4" -mido = "^1.2.10" -osc4py3 = "^1.0.8" -uvloop = {version = "^0.16.0", markers = "sys_platform == 'linux' or sys_platform == 'darwin'"} -click = "^8.1.3" -appdirs = "^1.4" -psutil = "^5.9" -rich = "^12.5.1" -lark = "^1.1.2" -pylint = "^2.15.0" - -[tool.poetry.group.dev.dependencies] -black = "^22.8.0" -[build-system] -requires = ["poetry-core", "wheel", "setuptools-cpp", "setuptools"] -build-backend = "poetry.core.masonry.api" +[project.urls] +homepage = "https://sardine.raphaelforment.fr" +documentation = "https://sardine.raphaelforment.fr" +repository = "https://github.com/Bubobubobubobubo/sardine" +"Bug Tracker" = "https://github.com/Bubobubobubobubo/sardine/issues" -[tool.poetry.scripts] -sardine-config-python = 'cli.main:edit_python_configuration' -sardine-config-superdirt = 'cli.main:edit_superdirt_configuration' -sardine-config = 'cli.main:main' +[project.scripts] +fishery = 'fishery.__main__:main' +sardine-config-python = 'sardine.cli.main:edit_python_configuration' +sardine-config-superdirt = 'sardine.cli.main:edit_superdirt_configuration' +sardine-config = 'sardine.cli.main:main' -[tool.cibuildwheel.linux] -repair-wheel-command = "" +[tool.black] +target_version = ['py311'] +include = '\.pyi?$' +exclude = ''' +( + /( + \.eggs # exclude 
a few common directories in the + | \.git # root of the project + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist + )/ +) +''' -[tool.cibuildwheel.macos] -repair-wheel-command = "" +[tool.pytest.ini_options] +addopts = "--import-mode=importlib" diff --git a/sardine/__init__.py b/sardine/__init__.py index 982b6702..4aecb24e 100644 --- a/sardine/__init__.py +++ b/sardine/__init__.py @@ -1,413 +1,8 @@ -# SARDINE: this is the main entry point for Sardine. __init__.py will attempt -# to load everything needed for an interactive session directly from here. -# Linters might complain about the fact some objects are not accessed. Sure, -# they are not accessed right now but will later in time when the user will -# start interacting with the system. - -from random import random, randint, choice -from math import floor -from rich import print -import asyncio -import warnings -import sys -import os -import importlib - -try: - import uvloop -except ImportError: - print("[yellow]UVLoop is not installed. Not supported on Windows![/yellow]") - print("[yellow]Rhythm accuracy may be impacted[/yellow]") -else: - uvloop.install() - -from random import random, randint, choice -from typing import Union, Any -from pathlib import Path -from rich.console import Console -from rich.markdown import Markdown -from rich import pretty -from rich.panel import Panel +from . 
import base, clock, event_loop, fish_bowl, handlers, scheduler, sequences +from .base import * from .clock import * -from .superdirt import SuperColliderProcess -from .io import Client as OSC -from .io import Receiver as Receiver -from .io import OSCSender, MIDISender -from .io import read_user_configuration, pretty_print_configuration_file -from .io import open_sardinopedia as open_help -from .io import print_sardinopedia as help -from .io import ClockListener, MidiListener, ControlTarget, NoteTarget - -from .sequences import ListParser -from .sequences.Iterators import Iterator -from .sequences.Variables import Variables -from .sequences.Sequence import E, euclid, mod, imod, pick, text_eater -from .sequences.LexerParser.FuncLibrary import qualifiers -from .sequences import Player, PatternHolder -from .sequences import * - -warnings.filterwarnings("ignore") -# Use rich print by default -pretty.install() - - -def _ticked(condition: bool): - """Print an ASCII Art [X] if True or [ ] if false""" - return "[X]" if condition else "[ ]" - - -# Reading / Creating / Updating the configuration file -config = read_user_configuration() -print_config = pretty_print_configuration_file - -sardine_intro = """ -░██████╗░█████╗░██████╗░██████╗░██╗███╗░░██╗███████╗ -██╔════╝██╔══██╗██╔══██╗██╔══██╗██║████╗░██║██╔════╝ -╚█████╗░███████║██████╔╝██║░░██║██║██╔██╗██║█████╗░░ -░╚═══██╗██╔══██║██╔══██╗██║░░██║██║██║╚████║██╔══╝░░ -██████╔╝██║░░██║██║░░██║██████╔╝██║██║░╚███║███████╗ -╚═════╝░╚═╝░░╚═╝╚═╝░░╚═╝╚═════╝░╚═╝╚═╝░░╚══╝╚══════╝ - -Sardine is a MIDI/OSC sequencer made for live-coding -Play music, read the docs, contribute, and have fun! -WEBSITE: [yellow]https://sardine.raphaelforment.fr[/yellow] -GITHUB: [yellow]https://github.com/Bubobubobubobubo/sardine[/yellow] -""" - -# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# -# Here starts the complex and convoluted session setup process. 
# -# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=# - -# Booting SuperCollider / SuperDirt -SC = None - -# Initialize the default Clock -c = None -cs = again = anew = a = None -cr = stop = None -children = None -S = None -M = None -O = None -MidiSend = None - -# Amphibian iterators and amphibian variables -i = None -v = None - - -def midinote(delay, note: int = 60, velocity: int = 127, channel: int = 1): - """Helper function to send a MIDI Note""" - asyncio.create_task( - c._midi.note(delay=delay, note=note, velocity=velocity, channel=channel) - ) - - -def cc(channel: int = 0, control: int = 20, value: int = 64): - """Control Changes (MIDI). Send a Control Change""" - asyncio.create_task( - c._midi.control_change(channel=channel, control=control, value=value) - ) - - -def pgch(channel: int = 0, program: int = 0): - """Program Changes (MIDI). Send a Program Change""" - asyncio.create_task(c._midi.program_change(channel=channel, program=program)) - - -def pwheel(channel: int = 0, pitch: int = 0): - """Pitchwheel (MIDI). Send a pitchweel message. For people looking at - the modwheel, this is usually done through control changes.""" - asyncio.create_task(c._midi.pitchwheel(channel=channel, pitch=pitch)) - - -def sysex(data: list[int]): - """ - Sysex Messages (MIDI). Non-standard MIDI messages, usually used by - some manufacturers to send custom messages and to provide more detailed - controls. Frequently used on older synths. - """ - asyncio.create_task(c._midi.sysex(data)) - - -def swim(fn): - """ - Swimming decorator: push a function to the clock. The function will be - declared and followed by the clock system to recurse in time if needed. - """ - cs(fn) - return fn - - -def die(fn): - """ - Swimming decorator: remove a function from the clock. The function will not - be called again and will likely stop recursing in time. 
- """ - cr(fn) - return fn - - -drown = die - - -def sleep(n_beats: Union[int, float]): - """Artificially sleep in the current function for `n_beats`. - - Example usage: :: - - @swim - def func(delay=4): - sleep(3) - for _ in range(3): - S('909').out() - sleep(1/2) - cs(func) - - This should *only* be called inside functions scheduled by the clock. - Calling this outside of a scheduled function will result in - abnormal behavior such as overlapping sounds and clock desync. - - Using in asynchronous functions - ------------------------------- - - This can be used in `async def` functions and does *not* need to be awaited. - - Sounds scheduled in asynchronous functions will be influenced by - real time passing. For example, if you sleep for 48 ticks and - await a function that takes 5 ticks to complete, any sounds sent - afterwards will occur 53 ticks from when the function was called (48 + 5). - - :: - - @swim - async def func(delay=4): - print(c.tick) # 0 - - sleep(1) # +48 virtual ticks (assuming c.ppqn = 48) - await abc() # +5 real-time ticks - - S('bd').out() # occurs 48 ticks from now - print(c.tick) # 53 - cs(func) - - Technical Details - ----------------- - - Unlike `time.sleep(n)`, this function does not actually block - the function from running. Instead, it temporarily affects the - value of `Clock.tick` and extends the perceived time of other - Clock methods like `Clock.wait_after()` and `Clock.get_beat_ticks()`. - - In essence, this maintains the precision of sound scheduling - without requiring the use of declarative syntax like - `S('909', at=1/2).out()`. - - """ - ticks = c.get_beat_ticks(n_beats, sync=False) - c.tick_shift += ticks - - -# Debugging parser: pure Sardine pattern syntax parser. Used for debugging when -# developping Sardine. Will print the AST and result of a given operation. 
- - -def parser(pattern: str): - """Parse a single expression and get result""" - parser = ListParser() - print(parser.parse(pattern)) - - -def parser_repl(parser_type: str) -> None: - """Parse a single expression and get result""" - parser = ListParser( - clock=c, iterators=c.iterators, variables=c.variables, parser_type=parser_type - ) - - def _exit_case(string): - if string.lower() == "exit": - return True - - try: - while True: - user_input = input("> ") - if _exit_case(user_input): - break - else: - p = parser._parse_debug(pattern=user_input) - except KeyboardInterrupt: - pass - - -def lang_debug() -> None: - """Debug mode for language dev""" - return parser_repl(parser_type="proto") - - -# Interface to the patterning system -def Pat(pattern: str, i: int = 0, div: int = 1, rate: int = 1) -> Any: - """Generates a pattern - - Args: - pattern (str): A pattern to be parsed - i (int, optional): Index for iterators. Defaults to 0. - - Returns: - int: The ith element from the resulting pattern - """ - parser = c.parser - result = parser.parse(pattern) - - def _pattern_element(div: int, rate: int, iterator: int, pattern: list) -> Any: - """Joseph Enguehard's algorithm for solving iteration speed""" - return floor(iterator * rate / div) % len(pattern) - - return result[_pattern_element(div=div, rate=rate, iterator=i, pattern=result)] - - -def print_scales() -> None: - """Print the list of built-in scales and chords""" - """Print all available scales in the patterning system""" - print(qualifiers.keys()) - - -def panic() -> None: - """Panic function, will cut everything""" - hush() # Stop everything - S("superpanic").out() # Superpanic is a synth capable of - # cutting every other synth - - -def print_qualities(): - """Return the list of qualifiers""" - return qualifiers - - -class Delay: - """ - with delay(0.5): - do_stuff() - """ - - def __init__(self, duration: Union[int, float] = 1, delayFirst: bool = True): - """ - This compound statements needs to know two things, 
already provided - by some default values: - duration: for how long do we wait before or after the block? - delayFirst: are we waiting before or after the block? - """ - self.duration = duration - self.delayFirst = delayFirst - - def __call__(self, duration=1, delayFirst=False): - self.duration = duration - self.delayFirst = delayFirst - return self - - def __enter__(self): - if self.delayFirst: - sleep(self.duration) - - def __exit__(self, exc_type, exc_val, exc_tb): - if not self.delayFirst: - sleep(self.duration) - - -from sys import argv - -hook_path = argv[0] -if "__main__.py" in hook_path: - os.environ["SARDINE_INIT_SESSION"] = "YES" - -if ( - os.getenv("SARDINE_INIT_SESSION") is not None - and os.getenv("SARDINE_INIT_SESSION") == "YES" -): - # Print intro - print(Panel.fit(f"[red]{sardine_intro}[/red]")) - print( - f" [yellow]BPM: [red]{config.bpm}[/red],", - f"[yellow]BEATS: [red]{config.beats}[/red]", - f"[yellow]SC: [red]{_ticked(config.boot_superdirt)}[/red],", - f"[yellow]DEFER: [red]{_ticked(config.deferred_scheduling)}[/red]", - f"[yellow]MIDI: [red]{config.midi}[/red]", - ) - - # Boot SuperCollider - if config.boot_superdirt is True: - try: - SC = SuperColliderProcess( - startup_file=config.superdirt_config_path, # config file - verbose=config.verbose_superdirt, # verbosity for SC output - ) - except OSError as error: - print("[red]SuperCollider could not be found![/red]") - else: - print("[green]Booting without SuperCollider![/green]") - - # Init default clock - c = Clock( - midi_port=config.midi, # default MIDI port - bpm=config.bpm, # default BPM configuration - beats_per_bar=config.beats, # default beats per bar - ppqn=config.ppqn, # default pulses per quarter note (MIDI/Clock related) - deferred_scheduling=config.deferred_scheduling, # Clock related - debug=config.debug, # Debug mode for printing every pattern - ) - # Synonyms for swimming function management - cs = again = anew = a = c.schedule_func # aliases for recursion - cr = stop = 
c.remove - children = c.print_children - # Senders: the most important I/O objects - S = c.note # default SuperDirt Sender - M = c.midinote # default Midi Sender - O = c.oscmessage # default OSC Sender - MidiSend = MIDISender - - # Start default clock - c.start(active=config.active_clock) - - # Load user config - if Path(f"{config.user_config_path}").is_file(): - spec = importlib.util.spec_from_file_location( - "user_configuration", config.user_config_path - ) - module = importlib.util.module_from_spec(spec) - sys.modules[spec.name] = module - spec.loader.exec_module(module) - from user_configuration import * - else: - print(f"[red]No user provided configuration file found...") - - # Init amphibian - i, v = c.iterators, c.variables - P = Pat - - # Quickstep functionality (similar to FoxDot) - __quickstep_patterns = PatternHolder( - clock=c, MIDISender=M, SuperDirtSender=S, OSCSender=O - ) - for (key, value) in __quickstep_patterns._patterns.items(): - globals()[key] = value - c.schedule_func(__quickstep_patterns._global_runner) - surf = __quickstep_patterns - play, play_midi, play_osc = Player.play, Player.play_midi, Player.play_osc - - def hush(*args): - """ - Name taken from Tidal. This is the most basic function to stop function(s) - from being called again. Will silence all functions by default. You can - also specify one or more functions to be stopped, keeping the others alive. +from .fish_bowl import * +from .handlers import * +from .scheduler import * - This function has been updated to take into account the new Quickstep patterns. 
- """ - try: - if len(args) >= 1: - for runner in args: - c.remove(runner) - else: - for name, runner in c.runners.items(): - if name != "_global_runner": - runner.stop() - finally: - __quickstep_patterns.reset() +__version__ = "0.2.0" diff --git a/sardine/base/__init__.py b/sardine/base/__init__.py new file mode 100644 index 00000000..2a124a14 --- /dev/null +++ b/sardine/base/__init__.py @@ -0,0 +1,4 @@ +from .clock import * +from .handler import * +from .parser import * +from .runner import * diff --git a/sardine/base/clock.py b/sardine/base/clock.py new file mode 100644 index 00000000..7d4c409a --- /dev/null +++ b/sardine/base/clock.py @@ -0,0 +1,257 @@ +import math +import time +from abc import ABC, abstractmethod +from typing import Optional, Union + +from .runner import BaseRunnerHandler + +__all__ = ("BaseClock",) + + +def _round_float(n: float, prec: int = 3): + s = format(n, f".{prec}f") + return s.rstrip("0").rstrip(".") + + +class BaseClock(BaseRunnerHandler, ABC): + """The base for all clocks to inherit from. + + This interface expects clocks to manage its own source of time + and provide the `phase`, `beat`, and `tempo` properties. + + In addition, an optional `async sleep()` method can be defined + to provide a mechanism for sleeping a specified duration. + If this method is not defined, the `FishBowl.sleeper` instance + will use a built-in polling mechanism for sleeping. 
+ """ + + def __init__(self): + super().__init__() + self._time_is_origin: bool = True + + def __repr__(self) -> str: + status = "running" if self.is_running() else "stopped" + return ( + "<{name} {status} time={0.time:.1f}" + " tempo={tempo} beats_per_bar={0.beats_per_bar}>" + ).format( + self, + name=type(self).__name__, + status=status, + tempo=_round_float(self.tempo), + ) + + def __str__(self) -> str: + return ("({name} {0.time:.1f}s) -> [{tempo}|{beat}/{bar} {phase:.0%}]").format( + self, + name=type(self).__name__, + tempo=_round_float(self.tempo, 3), + beat=self.beat % self.beats_per_bar + 1, + bar=self.beats_per_bar, + phase=self.phase / self.beat_duration, + ) + + # Abstract methods + + @abstractmethod + async def run(self): + """The main run loop of the clock. + + This should setup any external time source, assign the current + time to `internal_origin`, and then continuously + update the `internal_time`. + """ + + @property + @abstractmethod + def bar(self) -> int: + """The bar of the clock's current time. + + This property should account for time shift, but it is not expected + to be consistent across clocks or after any updates to the tempo. + """ + + @property + @abstractmethod + def beat(self) -> int: + """The beat of the clock's current time. + + This property should account for time shift, but it is not expected + to be consistent across clocks or after any updates to the tempo. + """ + + @property + @abstractmethod + def beat_duration(self) -> float: + """The length of a single beat in seconds. + + Typically this is represented as the function `60 / tempo`. + """ + + @property + @abstractmethod + def beats_per_bar(self) -> int: + """The number of beats in each bar. + + This property should account for time shift, but it is not expected + to be consistent across clocks or after any updates to the tempo. 
+ """ + + @property + @abstractmethod + def internal_origin(self) -> Optional[float]: + """The clock's internal time origin if available, measured in seconds. + + At the start of the `run()` method, this should be set as early + as possible in order for the `time` property to compute the + elapsed time. + + This **must** support a setter as the base clock will automatically + set this to the `internal_time` when the fish bowl is resumed. + """ + + @property + @abstractmethod + def internal_time(self) -> Optional[float]: + """The clock's internal time if available, measured in seconds. + + This attribute should be continuously updated when the + clock starts so the `time` property is able to move forward. + """ + + @property + @abstractmethod + def phase(self) -> float: + """The phase of the current beat in the range `[0, beat_duration)`. + + This property should account for time shift, but it is not expected + to be consistent across clocks or after any updates to the tempo. + """ + + @property + @abstractmethod + def tempo(self) -> float: + """The clock's current tempo.""" + + # Properties + + @property + def shifted_time(self) -> float: + """A shorthand for the current time with `Time.shift` added. + + Only the clock is expected to use this property when calculating + the current phase/beat/bar. + """ + return self.time + self.env.time.shift + + @property + def time(self) -> float: + """Returns the current time of the fish bowl. + + This uses the `internal_time` and `internal_origin` attributes + along with the fish bowl's `Time.origin` to calculate a monotonic + time for the entire system. + + If the fish bowl has been paused or stopped, `Time.origin` will be set + to the latest value provided by `internal_time`, and this property + will return `Time.origin` until the fish bowl resumes or starts again. + + If either the `internal_time` or `internal_origin` attributes + are not available, i.e. 
been set to `None`, this will default + to the `Time.origin` (still including time shift). This should + ideally be avoided when the clock starts running so time can + flow as soon as possible. + """ + if self._time_is_origin: + return self.env.time.origin + + i_time, i_origin = self.internal_time, self.internal_origin + if i_time is None or i_origin is None: + return self.env.time.origin + + return i_time - i_origin + self.env.time.origin + + # Public methods + + def can_sleep(self) -> bool: + """Checks if the clock supports sleeping.""" + # Get the sleep attribute and if it is a bound method, unwrap it + # for the actual function + method = getattr(self, "sleep", None) + method = getattr(method, "__func__", method) + return method is not BaseClock.sleep + + def get_beat_time(self, n_beats: Union[int, float], *, sync: bool = True) -> float: + """Determines the amount of time to wait for N beats to pass. + + Args: + n_beats (Union[int, float]): The number of beats to wait for. + sync (bool): + If True, the duration will be synchronized to an interval + accounting for the current time (and influenced by time shift). + If False, no synchronization is done, meaning the duration + for a given beat and tempo will always be the same. + + Returns: + float: The amount of time to wait in seconds. + """ + interval = self.beat_duration * n_beats + if interval <= 0.0: + return 0.0 + elif not sync: + return interval + + duration = interval - self.shifted_time % interval + + # Due to potential rounding errors, we might get a duration + # that should be, but isn't actually equal to the interval. + # As such, we will replace any durations below a picosecond. + if math.isclose(duration, 0.0, rel_tol=0.0, abs_tol=1e-12): + return interval + + return duration + + def get_bar_time(self, n_bars: Union[int, float], *, sync: bool = True) -> float: + """Determines the amount of time to wait for N bars to pass. + + Args: + n_bars (Union[int, float]): The number of bars to wait for. 
+ sync (bool): + If True, the duration will be synchronized to an interval + accounting for the current time (and influenced by time shift). + If False, no synchronization is done, meaning the duration + for a given bar and tempo will always be the same. + + Returns: + float: The amount of time to wait in seconds. + + """ + return self.get_beat_time(n_bars * self.beats_per_bar, sync=sync) + + async def sleep(self, duration: Union[float, int]) -> None: + """Sleeps for the given duration. + + This method can be optionally overridden by subclasses. + If it is not overridden, it is assumed that the class + does not support sleeping. + + Any implementations of this sleep must be able to handle + `asyncio.CancelledError` on any asynchronous statements. + """ + raise NotImplementedError + + # Handler hooks + + def hook(self, event: str, *args): + super().hook(event, *args) + if event in ("start", "resume"): + # Setting internal origin here is only useful for the resume event, + # unless the clock is able to provide an internal time before + # the clock has started + self.internal_origin = self.internal_time + self._time_is_origin = False + elif event in ("pause", "stop"): + self.env.time.origin = self.time + self._time_is_origin = True + # print(f"{event=} {self.env.time.origin=} {self.time=} " + # f"{self.env.is_paused()=} {self.env.is_running()=}") diff --git a/sardine/base/handler.py b/sardine/base/handler.py new file mode 100644 index 00000000..d8f5a654 --- /dev/null +++ b/sardine/base/handler.py @@ -0,0 +1,216 @@ +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from ..fish_bowl import FishBowl + +__all__ = ("BaseHandler",) + + +class BaseHandler: + """Handles particular events that are dispatched by a fish bowl. + + To add a handler to a fish bowl, the `FishBowl.add_handler()` method + should be called. Handlers can only be tied to one fish bowl at a time. 
+ + Unlike the fish bowl's concept of "hooks", handlers can apply themselves + to multiple events at once and have access to the fish bowl via + the `env` property. + + Handlers can also be given "child handlers" using the + `BaseHandler.add_child()` method. Whenever a parent handler is added + to a fish bowl, its children are automatically added afterwards. + Likewise, when a parent handler is removed, its children are also removed. + + Child handlers can still be manually removed from the fish bowl + after being added. However, child handlers cannot be added to a + fish bowl before the parent handler is added, or be added to + a fish bowl different than the parent's handler. + + This class can also be used directly for grouping handlers that don't + necessarily require anything from each other:: + + group = BaseHandler(lock_children=True) + group.add_child(SomeHandler()) + group.add_child(AnotherHandler()) + + However, if those handlers do depend on each other, it is recommended + to subclass this and add them as attributes of the group, making the + handlers available through the `parent` attribute. + + Args: + lock_children (Optional[bool]): + If True, any child handlers are required to share the same + fish bowl as the parent. Once its children are added to + a fish bowl, they cannot be removed by themselves, and the + parent must be removed instead. + If False, child handlers are freely removable. + If None, this will be deferred to the parent handler's setting. + For handlers without a parent, None is equivalent to False. 
+ """ + + def __init__(self, *, lock_children: Optional[bool] = None): + self.lock_children = lock_children + self._env: "Optional[FishBowl]" = None + self._children: list[BaseHandler] = [] + self._parent: Optional[BaseHandler] = None + + def __call__(self, *args, **kwargs): + """Calls the handler's `hook()` method.""" + self.hook(*args, **kwargs) + + def __repr__(self) -> str: + return "<{} {}>".format( + type(self).__name__, + " ".join( + f"{attr}={getattr(self, attr)!r}" + for attr in ( + "lock_children", + "env", + ) + ), + ) + + @property + def children(self) -> "list[BaseHandler]": + """A list of this handler's immediate children.""" + return self._children.copy() + + @property + def env(self) -> "Optional[FishBowl]": + """The fish bowl (a.k.a. environment) that this handler is added to.""" + return self._env + + @property + def locked(self) -> bool: + """Indicates if this handler is locked by one of its parent handlers.""" + if self.parent is None: + return False + elif self.parent.lock_children is None: + return self.parent.locked + return self.parent.lock_children + + @property + def parent(self) -> "Optional[BaseHandler]": + """The parent of this handler, if any.""" + return self._parent + + # Abstract methods + + def setup(self): + """Called when the handler is added to a fish bowl. + + This method can be used to register itself on specific + (or all) events with `self.register(event)`. + + The fish bowl will assign itself to the handler's `env` attribute + beforehand. + + It is also possible to register other handlers/hooks here as well, + but the fish bowl will not automatically remove those handlers. + The `teardown()` method should be used to remove those handlers + afterwards. + """ + + def teardown(self): + """Called when the handler is being removed from the fish bowl. + + By default, this method does nothing. + + After teardown finishes, the fish bowl will remove any hooks + and set the `env` attribute to None. 
+ """ + + def hook(self, event: str, *args): + """Dispatched by the fish bowl for the handler's registered events.""" + + # Public methods + + def add_child(self, handler: "BaseHandler"): + """Adds another handler as a child of this handler. + + If the parent handler is already added to a fish bowl, this + method will *not* add the child handler to the same fish bowl. + The child handler can still be added to the parent's fish bowl + before or after this is called. + + This method is idempotent; adding the handler more than once + will cause nothing to happen. However, child handlers cannot + be shared with other parent handlers. + + WARNING: this method does not prevent cyclic references from + occurring. Behaviour is undefined when a handler adds any of + its ancestors as a child of itself. + + Args: + handler (BaseHandler): The handler being added. + + Raises: + ValueError: + The handler is either already added to a fish bowl other than + the parent, or is already a child of a different handler, + or was attempting to add itself as a child. + + """ + if handler is self: + raise ValueError(f"{handler!r} cannot be a child of itself") + elif handler.env is not None and handler.env is not self.env: + raise ValueError(f"{handler!r} is already being used by {handler.env!r}") + elif handler.parent is not None: + # FIXME: proper handler cyclic reference prevention (ancestors/descendents) + if handler.parent is self: + return + raise ValueError(f"{handler!r} is already a child of {handler.parent!r}") + + handler._parent = self # pylint: disable=protected-access + self._children.append(handler) + + def remove_child(self, handler: "BaseHandler"): + """Removes an existing child handler from this handler. + + If the child handler was already set up, this method will *not* + remove the child handler from the fish bowl. + + After a handler has been removed, it can be re-used in new fish bowls. 
+ + This method is idempotent; removing the handler when + it has already been removed will cause nothing to happen. + + Args: + handler (BaseHandler): The child handler to remove. + """ + try: + i = self._children.index(handler) + except ValueError: + return + + # The statement below is intentionally commented to let locked + # handlers unbind themselves from their parent if desired: + # if handler.env is not None and handler.locked: + # raise ValueError(f"{handler!r} has been locked by its parent") + + handler._parent = None # pylint: disable=protected-access + self._children.pop(i) + + def register(self, event: Optional[str]): + """Registers the handler for the given event. + + This is a shorthand for doing `self.env.register_hook(event, self)`. + """ + if self.env is None: + raise ValueError( + "handler cannot register hooks until it is added to a FishBowl" + ) + + self.env.register_hook(event, self) + + def unregister(self, event: Optional[str]): + """Unregisters the handler for the given event. + + This is a shorthand for doing `self.env.unregister_hook(event, self)`. 
+ """ + if self.env is None: + raise ValueError( + "handler cannot unregister hooks until it is added to a FishBowl" + ) + + self.env.unregister_hook(event, self) diff --git a/sardine/base/parser.py b/sardine/base/parser.py new file mode 100644 index 00000000..e8839f5c --- /dev/null +++ b/sardine/base/parser.py @@ -0,0 +1,14 @@ +from abc import ABC, abstractmethod +from typing import Any + +from .handler import BaseHandler + +__all__ = ("BaseParser",) + + +class BaseParser(BaseHandler, ABC): + """The interface that fish bowl parsers are expected to implement.""" + + @abstractmethod + def parse(self, expr: str) -> Any: + """Parses the given string into a value.""" diff --git a/sardine/base/runner.py b/sardine/base/runner.py new file mode 100644 index 00000000..19605a3d --- /dev/null +++ b/sardine/base/runner.py @@ -0,0 +1,139 @@ +import asyncio +import concurrent.futures +import threading +from abc import ABC, abstractmethod +from typing import Optional + +from .handler import BaseHandler + +__all__ = ("BaseRunnerMixin", "BaseThreadedLoopMixin", "BaseRunnerHandler") + + +class BaseRunnerMixin(ABC): + """Provides methods for running a background asynchronous function.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._run_task: Optional[asyncio.Task] = None + + @abstractmethod + async def run(self): + """The method that will be executed in the background. + + This method must be ready to handle an `asyncio.CancelledError`. + """ + + def is_running(self) -> bool: + """Indicates if an asyncio task is currently executing `run()`.""" + return self._run_task is not None and not self._run_task.done() + + def start(self) -> bool: + """Starts the `run()` method in the background. + + Returns: + bool: True if the task was started, False otherwise. 
+ """ + allowed = not self.is_running() + if allowed: + self._run_task = asyncio.create_task(self.run()) + return allowed + + def stop(self) -> bool: + """Stops the background task by attempting to cancel it. + + As with any asyncio task, the `run()` method can prevent + cancellation by catching `asyncio.CancelledError`. + + Returns: + bool: True if the task was cancelled, False otherwise. + """ + if self.is_running(): + return self._run_task.cancel() + return False + + +class BaseThreadedLoopMixin(BaseRunnerMixin, ABC): + """Provides methods for running a looping function in another thread. + + Args: + loop_interval (float): + The amount of time to sleep between each iteration. + """ + + def __init__(self, *args, loop_interval: float, **kwargs): + super().__init__(*args, **kwargs) + self.loop_interval = loop_interval + self._run_thread: Optional[threading.Thread] = None + self._completed_event: Optional[asyncio.Event] = None + + @abstractmethod + def loop(self): + """Called on every iteration of the loop.""" + + @abstractmethod + def before_loop(self): + """Called before the loop is about to start.""" + + @abstractmethod + def after_loop(self): + """Called after the loop has stopped.""" + + def _run(self): + try: + self.before_loop() + + fut = asyncio.run_coroutine_threadsafe( + self._completed_event.wait(), self._loop + ) + + try: + while not self._completed_event.is_set(): + self.loop() + + try: + fut.result(timeout=self.loop_interval) + except concurrent.futures.CancelledError: + break + except concurrent.futures.TimeoutError: + pass + finally: + self.after_loop() + finally: + self._completed_event.set() + + async def run(self): + self._completed_event = asyncio.Event() + self._loop = asyncio.get_running_loop() + self._run_thread = threading.Thread(target=self._run) + self._run_thread.start() + + try: + await self._completed_event.wait() + finally: + self._completed_event.set() + + +class BaseRunnerHandler(BaseRunnerMixin, BaseHandler, ABC): + """Adds automatic 
starting and stopping to a runner using the handler system. + + Subclasses that override `setup()`, `teardown()`, or `hook()`, must call + the corresponding super method. + """ + + TRANSPORT_EVENTS = ("start", "stop", "pause", "resume") + + def setup(self): + for event in self.TRANSPORT_EVENTS: + self.register(event) + + if self.env.is_running(): + self.start() + + def teardown(self): + self.stop() + + def hook(self, event: str, *args): + if event in ("start", "resume"): + self.start() + elif event == "stop": + self.stop() diff --git a/cli/README.md b/sardine/cli/README.md similarity index 100% rename from cli/README.md rename to sardine/cli/README.md diff --git a/cli/main.py b/sardine/cli/main.py similarity index 70% rename from cli/main.py rename to sardine/cli/main.py index 70ec7170..9d05c674 100644 --- a/cli/main.py +++ b/sardine/cli/main.py @@ -1,18 +1,19 @@ -import argparse +import json +from itertools import chain +from pathlib import Path + import click import mido -import json -import sys # Wildcard used in docs.. 
from appdirs import * -from pathlib import Path -from rich import print -from rich.panel import Panel -from itertools import chain from InquirerPy import inquirer, prompt -from InquirerPy.validator import EmptyInputValidator from InquirerPy.base.control import Choice +from InquirerPy.validator import EmptyInputValidator +from rich import print +from rich.panel import Panel + +from ..io.UserConfig import create_template_configuration_file FUNNY_TEXT = """ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░ @@ -25,28 +26,6 @@ This is the configuration tool for Sardine """ - -# def str2bool(v): -# """Boolean validation method for argparse type checking""" -# if v.lower() in ("yes", "true", "t", "y", "1"): -# return True -# elif v.lower() in ("no", "false", "f", "n", "0"): -# return False -# else: -# raise argparse.ArgumentTypeError("Boolean value expected.") -# -# -# def pairwise(iterable): -# """s -> (s0, s1), (s2, s3), (s4, s5), ...""" -# a = iter(iterable) -# return zip(a, a) - - -# ============================================================================ # -# A dead simple argparse configuration tool to edit values stored in config.json -# Automatic type-checking / error raising for each value. -# ============================================================================ # - # Appdirs boilerplate code APP_NAME, APP_AUTHOR = "Sardine", "Bubobubobubo" USER_DIR = Path(user_data_dir(APP_NAME, APP_AUTHOR)) @@ -65,56 +44,6 @@ def write_json_file(data: dict): json.dump(data, jsonFile, indent=4, sort_keys=True) -# def old_main(): -# """Entry method for the argparse parser""" -# -# # Check if the configuration file exists, otherwise, warn user -# if not CONFIG_JSON.is_file(): -# print( -# f"[red]The configuration file is missing.\ -# Please boot Sardine first." 
-# ) -# exit() -# data = read_json_file() -# -# parser = argparse.ArgumentParser(description="Sardine configuration CLI") -# parser.add_argument("--midi", type=str, help="Default MIDI port") -# parser.add_argument("--bpm", type=float, help="Beats per minute") -# parser.add_argument("--beats", type=int, help="Beats per bar") -# parser.add_argument("--ppqn", type=float, help="ppqn") -# parser.add_argument("--boot_superdirt", type=str2bool, help="Boot SC && SuperDirt") -# parser.add_argument("--debug", type=str2bool, help="Parser debugging mode") -# parser.add_argument( -# "--verbose_superdirt", type=str2bool, help="Toggle SuperDirt textual output" -# ) -# parser.add_argument( -# "--deferred_scheduling", type=str2bool, help="Turn on/off deferred scheduling" -# ) -# parser.add_argument("--active_clock", type=str2bool, help="Active or passive Clock") -# parser.add_argument( -# "--SCconfig", type=str2bool, help="SuperDirt Configuration Path" -# ) -# parser.add_argument( -# "--User Config Path", type=bool, help="Python User Configuration file" -# ) -# -# if len(sys.argv) < 2: -# print(f"[red]{FUNNY_TEXT}") -# print(f"Your configuration file is located at: {USER_DIR}") -# parser.print_help() -# exit() -# -# # Grabing arguments from parser.parse_args() -# args = parser.parse_args() -# to_update = list(chain.from_iterable([x for x in args._get_kwargs()])) -# -# # Iterating over the collected kwargs and write to file if needed -# for name, value in pairwise(to_update): -# if value is not None: -# data["config"][name] = value -# write_json_file(data) -# -# def _edit_configuration(file_name: str): configuration_file = USER_DIR / file_name # If the file already exists, we will read it first before opening editor @@ -198,19 +127,19 @@ def _select_bpm_and_timing(config_file: dict) -> dict: ( f"[red]Tempo: [green]{config_file['bpm']}[/green][/red] | " + f"[red]Beats: [green]{config_file['beats']}[/green][/red] | " - + f"[red]PPQN: [green]{config_file['ppqn']}[/green][/red]" + + 
f"[red]Link: [green]{config_file['link_clock']}[/green][/red]" ) ) ) - active_clock = inquirer.select( - message="Should the Clock be active or passive?", + link_clock = inquirer.select( + message="Should Sardine default to the LinkClock?", choices=[ - Choice(value=True, enabled=True, name="Active (default)"), - Choice(value=False, name="Passive (for MIDI In)"), + Choice(value=False, enabled=True, name="No (internal clock)"), + Choice(value=True, name="Yes (external clock)"), ], default=None, ).execute() - config_file["active_clock"] = active_clock + config_file["link_clock"] = link_clock tempo = inquirer.number( message="Input a new default tempo (BPM):", min_allowed=20, @@ -225,20 +154,12 @@ def _select_bpm_and_timing(config_file: dict) -> dict: max_allowed=999, ).execute() config_file["beats"] = int(beats) - ppqn = inquirer.number( - message="Select a new number of ppqn (Pulses per Quarter Note):", - min_allowed=1, - max_allowed=999, - default=48, - float_allowed=False, - ).execute() - config_file["ppqn"] = int(ppqn) print( Panel.fit( ( f"[red]Tempo: [green]{config_file['bpm']}[/green][/red] | " + f"[red]Beats: [green]{config_file['beats']}[/green][/red] | " - + f"[red]PPQN: [green]{config_file['ppqn']}[/green][/red]" + + f"[red]Link: [green]{config_file['link_clock']}[/green][/red]" ) ) ) @@ -250,20 +171,43 @@ def _select_supercollider_settings(config_file: dict) -> dict: print( Panel.fit( ( - f"[red]Boot SuperCollider: [green]{config_file['boot_superdirt']}[/red][/green] | " + f"[red]SuperDirt Handler: [green]{config_file['superdirt_handler']}[/red][/green] | " + + f"[red]Boot SuperCollider: [green]{config_file['boot_supercollider']}[/red][/green] | " + + f"[red]Sardine boot file: [green]{config_file['sardine_boot_file']}[/red][/green] | " + f"[red]SuperCollider boot Path: [green]{config_file['superdirt_config_path']}[/red][/green]" ) ) ) boot = inquirer.select( - message="Boot SuperCollider along with Sardine?", + message="Add SuperDirt handler to Sardine?", 
+ choices=[ + Choice(value=True, enabled=True, name="Yes"), + Choice(value=False, name="No"), + ], + default=None, + ).execute() + config_file["superdirt_handler"] = boot + + boot_supercollider = inquirer.select( + message="Boot a SuperCollider instance?", choices=[ Choice(value=True, enabled=True, name="Yes"), Choice(value=False, name="No"), ], default=None, ).execute() - config_file["boot_superdirt"] = boot + config_file["boot_supercollider"] = boot_supercollider + + sardine_boot_file = inquirer.select( + message="Use Sardine boot file (in config folder)?", + choices=[ + Choice(value=True, enabled=True, name="Yes"), + Choice(value=False, name="No"), + ], + default=None, + ).execute() + config_file["sardine_boot_file"] = sardine_boot_file + verbose_superdirt = inquirer.select( message="Turn on verbose output for SuperCollider?", choices=[ @@ -283,7 +227,9 @@ def _select_supercollider_settings(config_file: dict) -> dict: print( Panel.fit( ( - f"[red]Boot SuperCollider: [green]{config_file['boot_superdirt']}[/red][/green] | " + f"[red]SuperDirt Handler: [green]{config_file['superdirt_handler']}[/red][/green] | " + + f"[red]Boot SuperCollider: [green]{config_file['boot_supercollider']}[/red][/green] | " + + f"[red]Sardine boot file: [green]{config_file['sardine_boot_file']}[/red][/green] | " + f"[red]SuperCollider boot Path: [green]{config_file['superdirt_config_path']}[/red][/green]" ) ) @@ -296,7 +242,7 @@ def _select_additional_options(config_file: dict) -> dict: print( Panel.fit( ( - f"[red]Debug mode: [green]{config_file['boot_superdirt']}[/green][/red] | " + f"[red]Debug mode: [green]{config_file['debug']}[/green][/red] | " + f"[red]User config path: [green]{config_file['user_config_path']}[/green][/red]" ) ) @@ -319,7 +265,7 @@ def _select_additional_options(config_file: dict) -> dict: print( Panel.fit( ( - f"[red]Debug mode: [green]{config_file['boot_superdirt']}[/green][/red] | " + f"[red]Debug mode: [green]{config_file['debug']}[/green][/red] | " + 
f"[red]User config path: [green]{config_file['user_config_path']}[/green][/red]" ) ) @@ -337,7 +283,15 @@ def main(): Just like before, we are building a monolothic configuration dict that we inject into the current config.json file. Not fancy but cool nonetheless! """ - MENU_CHOICES = ["Show Config", "MIDI", "Clock", "SuperCollider", "More", "Exit"] + MENU_CHOICES = [ + "Show Config", + "Reset", + "MIDI", + "Clock", + "SuperCollider", + "More", + "Exit", + ] try: USER_CONFIG = read_json_file()["config"] except FileNotFoundError as e: @@ -364,6 +318,9 @@ def main(): exit() else: continue + elif menu_select == "Reset": + create_template_configuration_file(CONFIG_JSON) + USER_CONFIG = read_json_file()["config"] elif menu_select == "Show Config": print(USER_CONFIG) elif menu_select == "MIDI": diff --git a/sardine/clock/AsyncRunner.py b/sardine/clock/AsyncRunner.py deleted file mode 100644 index 8baad378..00000000 --- a/sardine/clock/AsyncRunner.py +++ /dev/null @@ -1,443 +0,0 @@ -from typing import Any, TYPE_CHECKING, Union -from dataclasses import dataclass, field -from collections import deque -from rich.panel import Panel -from rich import print -import traceback -import functools -import asyncio -import inspect - -if TYPE_CHECKING: - from . import Clock, TickHandle - from .Clock import MaybeCoroFunc - -__all__ = ("AsyncRunner", "FunctionState") - -MAX_FUNCTION_STATES = 3 - - -def print_panel(text: str) -> None: - """ - Print swimming function event inside a Rich based Panel. - The box is automatically resized to fit text length. 
- """ - print("\n", Panel.fit(text), end="") - - -def _assert_function_signature(sig: inspect.Signature, args, kwargs): - if args: - message = "Positional arguments cannot be used in scheduling" - if missing := _missing_kwargs(sig, args, kwargs): - message += "; perhaps you meant `{}`?".format( - ", ".join(f"{k}={v!r}" for k, v in missing.items()) - ) - raise TypeError(message) - - -def _discard_kwargs(sig: inspect.Signature, kwargs: dict[str, Any]) -> dict[str, Any]: - """Discards any kwargs not present in the given signature.""" - MISSING = object() - pass_through = kwargs.copy() - - for param in sig.parameters.values(): - value = kwargs.get(param.name, MISSING) - if value is not MISSING: - pass_through[param.name] = value - - return pass_through - - -def _extract_new_delay( - sig: inspect.Signature, kwargs: dict[str, Any] -) -> Union[float, int]: - delay = kwargs.get("d") - if delay is None: - param = sig.parameters.get("d") - delay = getattr(param, "default", 1) - - if not isinstance(delay, (float, int)): - raise TypeError(f"Delay must be a float or integer, not {delay!r}") - elif delay <= 0: - raise ValueError(f"Delay must be >0, not {delay}") - - return delay - - -def _missing_kwargs( - sig: inspect.Signature, args: tuple[Any], kwargs: dict[str, Any] -) -> dict[str, Any]: - required = [] - defaulted = [] - for param in sig.parameters.values(): - if param.kind in ( - param.POSITIONAL_ONLY, - param.VAR_POSITIONAL, - param.VAR_KEYWORD, - ): - continue - elif param.name in kwargs: - continue - elif param.default is param.empty: - required.append(param.name) - else: - defaulted.append(param.name) - - guessed_mapping = dict(zip(required + defaulted, args)) - return guessed_mapping - - -async def _maybe_coro(func, *args, **kwargs): - if inspect.iscoroutinefunction(func): - return await func(*args, **kwargs) - return func(*args, **kwargs) - - -@dataclass -class FunctionState: - func: "MaybeCoroFunc" - args: tuple - kwargs: dict - - -@dataclass -class AsyncRunner: - 
"""Handles calling synchronizing and running a function in - the background, with support for run-time function patching. - - This class should only be used through a Clock instance via - the `Clock.schedule_func()` method. - - The `deferred` parameter is used to control whether AsyncRunner - runs with an implicit tick shift when calling its function or not. - This helps improve sound synchronization by giving the function - a full beat to execute rather than a single tick. - For example, assuming bpm = 120 and ppqn = 48, `deferred=False` - would require its function to complete within 10ms (1 tick), - whereas `deferred=True` would allow a function with `d=1` - to finish execution within 500ms (1 beat) instead. - - In either case, if the function takes too long to execute, it will miss - its scheduling deadline and cause an unexpected gap between function calls. - Functions must complete within the time span to avoid this issue. - - """ - - clock: "Clock" - deferred: bool = field(default=True) - states: list[FunctionState] = field( - default_factory=functools.partial(deque, maxlen=MAX_FUNCTION_STATES) - ) - - interval_shift: int = field(default=0, repr=False) - """ - The number of ticks to offset the runner's interval. - - An interval defines the number of ticks between each execution - of the current function. For example, a clock with a ppqn of 24 - and a delay of 2 beats means each interval is 48 ticks. - - Through interval shifting, a function can switch between different - delays and then compensate for the clock's current tick to avoid - the next immediate beat being shorter than the expected interval. - - Initially, functions have an interval shift of 0. The runner - will automatically change its interval shift when the function - schedules itself with a new delay. This can lead to functions - with the same delay running at different ticks. To synchronize - these functions together, their interval shifts should be set - back to 0 or at least the same value. 
- """ - - _swimming: bool = field(default=False, repr=False) - _stop: bool = field(default=False, repr=False) - _task: Union[asyncio.Task, None] = field(default=None, repr=False) - _reload_event: asyncio.Event = field(default_factory=asyncio.Event, repr=False) - - _can_correct_delay: bool = field(default=False, repr=False) - _delta: int = field(default=0, repr=False) - _expected_interval: int = field(default=0, repr=False) - _last_delay: Union[float, int] = field(default=0.0, repr=False) - - # State management - - def push(self, func: "MaybeCoroFunc", *args, **kwargs): - """Pushes a function state to the runner to be called in the next iteration.""" - if not self.states: - state = FunctionState(func, args, kwargs) - - # Once the runner starts it needs the `_last_delay` for interval correction, - # and since we are in a convenient spot we will populate it here - signature = inspect.signature(func) - self._last_delay = _extract_new_delay(signature, kwargs) - - return self.states.append(state) - - last_state = self.states[-1] - - if func is last_state.func: - # Function reschedule, patch the top-most state - last_state.args = args - last_state.kwargs = kwargs - self._allow_delay_correction() - else: - # New function, transfer arguments from last state if possible - # (any excess arguments here should be discarded by `_runner()`) - args = args + last_state.args[len(args) :] - kwargs = last_state.kwargs | kwargs - self.states.append(FunctionState(func, args, kwargs)) - - def reload(self): - """Triggers an immediate state reload. - - This method is useful when changes to the clock occur, - or when a new function is pushed to the runner. - - """ - self._reload_event.set() - - # Lifecycle control - - def start(self): - """Initializes the background runner task. - - :raises RuntimeError: - This method was called after the task already started. 
- - """ - if self._task is not None: - raise RuntimeError("runner task has already started") - - self._task = asyncio.create_task(self._runner()) - self._task.add_done_callback(asyncio.Task.result) - - def started(self) -> bool: - """Returns True if the runner has been started. - - This method will remain true even if the runner stops afterwards. - - """ - return self._task is not None - - def swim(self): - """Allows the runner to continue the next iteration. - This method must be called continuously to keep the runner alive.""" - self._swimming = True - - def stop(self): - """Stops the runner's execution after the current iteration. - - This method takes precedence when `swim()` is also called. - - """ - self._stop = True - self.reload() - - # Interval shifting - - def _allow_delay_correction(self): - """Allows the interval to be corrected in the next iteration.""" - self._can_correct_delay = True - - def _correct_interval(self, delay: Union[float, int]): - """Checks if the interval should be corrected. - - Interval correction occurs when `_allow_delay_correction()` - is called, and the given delay is different from the last delay - *only* for the current iteration. If the delay did not change, - delay correction must be requested again. - - :param delay: The delay being used in the current iteration. - - """ - if self._can_correct_delay and delay != self._last_delay: - self._delta = self.clock.tick - self._expected_interval - with self.clock._scoped_tick_shift(-self._delta): - self.interval_shift = self.clock.get_beat_ticks(delay) - - self._last_delay = delay - - self._can_correct_delay = False - - def _get_corrected_interval( - self, - delay: Union[float, int], - *, - delta_correction: bool = False, - offset: int = 0, - ) -> int: - """Returns the number of ticks until the next `delay` interval, - offsetted by the `offset` argument. - - This method also adjusts the interval according to the - `interval_shift` attribute. 
- - :param delay: The number of beats within each interval. - :param delta_correction: - If enabled, the interval is adjusted to correct for - any drift from the previous iteration, i.e. whether the - runner was slower or faster than the expected interval. - :param offset: - The number of ticks to offset from the interval. - A positive offset means the result will be later than - the actual interval, while a negative offset will be sooner. - :returns: The number of ticks until the next interval is reached. - - """ - delta = self._delta if delta_correction else 0 - with self.clock._scoped_tick_shift(self.interval_shift - delta - offset): - return self.clock.get_beat_ticks(delay) - delta - - # Runner loop - - async def _runner(self): - """The entry point for AsyncRunner. This can only be started - once per AsyncRunner instance through the `start()` method. - - Drift correction - ---------------- - In this loop, there is a potential for drift to occur anywhere with - an async/await keyword. The relevant steps here are: - - 1. Correct interval - 2. (await) Sleep until interval - 3. (await) Call function - 4. Repeat - - Step 2 tends to add a tick of latency (a result of `asyncio.wait()`), - otherwise known as a +1 drift. When using deferred scheduling, step 3 - subtracts that drift to make sure sounds are still scheduled for - the correct tick. If more asynchronous steps are added before the - call function, deferred scheduling *must* account for their drift - as well. - - Step 3 usually adds a 0 or +1 drift, although slow functions may - increase this drift further. Assuming the clock isn't being blocked, - we can fully measure this using the expected interval. - - For functions using static delays/intervals, this is not required - as `Clock.get_beat_ticks()` can re-synchronize with the interval. - However, when we need to do interval correction, a.k.a. 
tick shifting, - we need to compensate for this drift to ensure the new interval - precisely has the correct separation from the previous interval. - This is measured in the `_delta` attribute as similarly named in - the `Clock` class, but is only computed when interval correction - is needed. - - """ - self.swim() - last_state = self.states[-1] - name = last_state.func.__name__ - if name != "_global_runner": - print_panel(f"[yellow][[red]{name}[/red] is swimming][/yellow]") - - try: - while self.states and self._swimming and not self._stop: - # `state.func` must schedule itself to keep swimming - self._swimming = False - self._reload_event.clear() - state = self.states[-1] - name = state.func.__name__ - - if state is not last_state: - pushed = len(self.states) > 1 and self.states[-2] is last_state - if name != "_global_runner": - if pushed: - print_panel(f"[yellow][Updating [red]{name}[/red]]") - else: - print_panel( - f"[yellow][Saving [red]{name}[/red] from crash]" - ) - last_state = state - - signature = inspect.signature(state.func) - - try: - _assert_function_signature(signature, state.args, state.kwargs) - - # Remove any kwargs not present in the new function - # (prevents TypeError when user reduces the signature) - args = state.args - kwargs = _discard_kwargs(signature, state.kwargs) - - delay = _extract_new_delay(signature, state.kwargs) - except (TypeError, ValueError) as e: - print(f"[red][Bad function definition ({name})]") - traceback.print_exception(type(e), e, e.__traceback__) - self._revert_state() - self.swim() - continue - - self._correct_interval(delay) - self._expected_interval = ( - self.clock.tick - + self._get_corrected_interval(delay, delta_correction=True) - ) - - # start = self.clock.tick - - handle = self._wait_beats(delay) - reload_task = asyncio.ensure_future(self._reload_event.wait()) - done, pending = await asyncio.wait( - (asyncio.ensure_future(handle), reload_task), - return_when=asyncio.FIRST_COMPLETED, - ) - - sleep_drift = 
self.clock.tick - handle.when - - # print( - # f"{self.clock} AR [green]" - # f"expected: {self._expected_interval}, previous: {start}, " - # f"delta: {self._delta}, shift: {self.interval_shift}, " - # f"post drift: {sleep_drift}" - # ) - - for fut in pending: - fut.cancel() - if reload_task in done: - self.swim() - continue - - try: - # Use copied context in function by creating it as a task - await asyncio.create_task( - self._call_func(sleep_drift, state.func, args, kwargs), - name=f"asyncrunner-func-{name}", - ) - except Exception as e: - print(f"[red][Function exception | ({name})]") - traceback.print_exception(type(e), e, e.__traceback__) - self._revert_state() - self.swim() - finally: - # Remove from clock if necessary - print_panel(f"[yellow][Stopped [red]{name}[/red]][/yellow]") - self.clock.runners.pop(name, None) - - async def _call_func(self, delta: int, func, args, kwargs): - """Calls the given function and optionally applies an initial - tick shift of 1 beat when the `deferred` attribute is - set to True. - """ - if self.deferred: - ticks = 1 * self.clock.ppqn - delta - self.clock.tick_shift += ticks - - return await _maybe_coro(func, *args, **kwargs) - - def _wait_beats(self, n_beats: Union[float, int]) -> "TickHandle": - """Returns a TickHandle waiting until one tick before the - given number of beats is reached. 
- """ - clock = self.clock - ticks = self._get_corrected_interval(n_beats, offset=-1) - return clock.wait_after(n_ticks=ticks) - - def _revert_state(self): - failed = self.states.pop() - - if self.states: - # patch the global scope so recursive functions don't - # invoke the failed function - reverted = self.states[-1] - failed.func.__globals__[failed.func.__name__] = reverted.func diff --git a/sardine/clock/Clock.py b/sardine/clock/Clock.py deleted file mode 100644 index 61962c46..00000000 --- a/sardine/clock/Clock.py +++ /dev/null @@ -1,807 +0,0 @@ -import asyncio -import contextlib -import contextvars -import functools -import inspect -import heapq -import time -import mido - -from typing import Awaitable, Callable, Optional, TypeVar, Union -from sardine.io.Osc import Client -from collections import deque -from rich import print - -from . import AsyncRunner -from ..sequences import ListParser -from ..sequences.Iterators import Iterator -from ..sequences.Variables import Variables -from ..io import MIDIIo, ClockListener, SuperDirtSender, MIDISender, OSCSender - -__all__ = ("Clock", "TickHandle") - -T = TypeVar("T") -MaybeCoroFunc = Callable[..., Union[T, Awaitable[T]]] - -# This specifies the number of ticks to offset the clock in the current context. -# Usually this tick shift is updated within the context of scheduled functions -# to simulate sleeping without actually blocking the function. Behavior is -# undefined if the tick shift is changed in the global context. 
-tick_shift = contextvars.ContextVar("tick_shift", default=0) - - -@functools.total_ordering -class TickHandle: - - """A handle that allows waiting for a specific tick to pass in the clock.""" - - __slots__ = ("when", "fut") - - def __init__(self, tick: int): - self.when = tick - self.fut = asyncio.Future() - - def __repr__(self): - return "<{} {} when={}>".format( - type(self).__name__, - "pending" - if not self.fut.done() - else "done" - if not self.fut.cancelled() - else "cancelled", - self.when, - ) - - def __eq__(self, other): - if not isinstance(other, TickHandle): - return NotImplemented - return self.when == other.when and self.fut == other.fut - - def __hash__(self): - return hash((self.when, self.fut)) - - def __lt__(self, other): - if not isinstance(other, TickHandle): - return NotImplemented - return self.when < other.when - - def __await__(self): - return self.fut.__await__() - - def cancel(self): - return self.fut.cancel() - - def cancelled(self): - return self.fut.cancelled() - - -class Clock: - - """ - MIDI Clock and scheduler implementation. This class is - the core of Sardine. It generates an asynchronous MIDI - clock and will schedule functions on it accordingly. - - Keyword arguments: - port_name: str -- Exact String for the MIDIOut Port. - bpm: Union[int, float] -- Clock Tempo in beats per minute - beats_per_bar: int -- Number of beats in a given bar - deferred_scheduling: bool -- Whether the clock implicitly defers - sounds sent in functions or not. - """ - - def __init__( - self, - midi_port: Optional[str], - ppqn: int = 48, - bpm: Union[float, int] = 120, - beats_per_bar: int = 4, - deferred_scheduling: bool = True, - debug: bool = True, - ): - self._midi = MIDIIo(port_name=midi_port, clock=self) - - # This OSC port will forward clock information to external listeners - # This is a nice way to intercept clock messages for applications - # spying on Sardine. 
- self._osc = Client( - ip="127.0.0.1", port=12345, name="SardineOsc", ahead_amount=0 - ) - - # Clock parameters - self._accel: float = 0.0 - self._nudge: float = 0.0 - self._midi_nudge: float = 0.0 - self._superdirt_nudge: float = 0.3 - self._bpm: float = bpm - self._ppqn: int = ppqn - self.beat_per_bar: int = beats_per_bar - self.running: bool = False - self.debug: bool = False - - # Scheduling attributes - self.runners: dict[str, AsyncRunner] = {} - self.tick_handles: list[TickHandle] = [] - self._deferred_scheduling = deferred_scheduling - - # Real-time attributes - self._current_tick = 0 - self._delta = 0.0 - self._phase_snapshot = 0 - - # MIDI In Listener - self._midi_port = midi_port - self._listener = None - self._delta_duration_list = deque(maxlen=200) - - # Ableton Link related - self._link = None - self._linktime = { - "tempo": 0, - "beats": 0, - "phase": 0, - } - - # Parser - self.iterators = Iterator() - self.variables = Variables() - self._parser_debug = debug - self.parser = ListParser( - clock=self, - variables=self.variables, - iterators=self.iterators, - debug=self._parser_debug, - ) - - def __repr__(self): - shift = self.tick_shift - if shift: - tick = f"{self._current_tick}{shift:+}" - else: - tick = str(self._current_tick) - - return "<{} running={} tick={}>".format(type(self).__name__, self.running, tick) - - # ---------------------------------------------------------------------- # - # Clock properties - - @property - def linktime(self) -> dict: - """Return current Link clock time""" - return self._linktime - - @linktime.setter - def linktime(self, new_time: dict) -> None: - self._linktime = self._get_new_linktime(new_time) - - @property - def nudge(self) -> int: - return self._nudge - - @nudge.setter - def nudge(self, value: int): - """ - Nudge the clock to align on another peer. Very similar to accel - but temporary. Nudge will reset every time the clock loops around. 
- - Args: - value (int): nudge factor - """ - self._nudge = value - self._reload_runners() - - @property - def midi_nudge(self) -> int: - return self._nudge - - @midi_nudge.setter - def midi_nudge(self, value: int): - """ - Nudge every MIDI Message by a given amount of time. Useful for synchronization - purposes when everything else fails and if you are still lagging a few milliseconds - behind everything else. - - Args: - value (int): nudge amount - """ - self._midi_nudge = value - - @property - def superdirt_nudge(self) -> int: - return self._superdirt_nudge - - @superdirt_nudge.setter - def superdirt_nudge(self, value: int): - """Nudge every SuperDirt Message by a given amount of time. - Beware, nudging in the clock through this attribute is not - the same as nudging the OSC messages themselves. If you get - late messages in SuperDirt, please configure ahead_amount for - the OSC object. - - Args: - value (int): nudge amount - """ - self._superdirt_nudge = value - - @property - def accel(self) -> int: - return self._accel - - @accel.setter - def accel(self, value: int): - """ - Accel stands for acceleration. In active MIDI mode, accel acts as a way - to nudge the clock to run faster or slower (from 0 to 100%). It can be - quite useful when dealing with a musician that can't really use any - synchronisation protocol. Beware, you will have to manually reset the - accel amount after setting it if you want to stop and not fasten the - clock. - - Args: - value (int): a nudge factor from 0 to 100% - - Raises: - ValueError: if 'value' exceeds 100% - """ - if value >= 100: - raise ValueError("Cannot set acceleration above 100%.") - self._accel = value - self._reload_runners() - - @property - def deferred_scheduling(self): - return self._deferred_scheduling - - @deferred_scheduling.setter - def deferred_scheduling(self, enabled: bool): - """Turn on deferred scheduling. 
- - Args: - enabled (bool): True or False - """ - self._deferred_scheduling = enabled - - for runner in self.runners.values(): - runner.deferred = enabled - - @property - def tick(self) -> int: - return self._current_tick + self.tick_shift - - @tick.setter - def tick(self, new_tick: int) -> int: - """ - Tick is the tiniest grain of time recognized by the Sardine Clock. - A tick is the time taken by the clock to loop on itself. Ticks are - used to deduce all other temporal informations: beat, bar, etc... - They are also sometimes used to compute duration of a given event. - - Args: - new_tick (int): give a new tick (backwards or forward in time) - """ - change = new_tick - self._current_tick - self._current_tick = new_tick - self._shift_handles(change) - self._reload_runners() - self._update_handles() - - @property - def bpm(self) -> int: - return self._bpm - - @bpm.setter - def bpm(self, new_bpm: int): - """Beats per minute. Tempo for the Sardine Clock. - - Args: - new_bpm (int): new tempo value - - Raises: - ValueError: if tempo < 20 or tempo > 999 (non-musical values) - """ - if not 20 < new_bpm < 999: - raise ValueError("bpm must be within 1 and 800") - self._bpm = new_bpm - if self._link: - pass - self._reload_runners() - - @property - def ppqn(self) -> int: - return self._ppqn - - @ppqn.setter - def ppqn(self, pulses_per_quarter_note: int) -> int: - """Pulse per quarter note: how many pulses to form a quarter - note. Typically used by MIDI Clocks, the PPQN is an important - value to determine how fast a clock will be ticking and counting - beats, measures, etc... It is important to make sure that your PPQN - is identic to the PPQN of the device you are trying to synchronise - with. - - Args: - pulses_per_quarter_note (int): Generally a multiple of 2 (24, 48). 
- """ - self._ppqn = pulses_per_quarter_note - self._reload_runners() - - @property - def current_beat(self) -> int: - """The number of beats passed since the initial time.""" - return self.tick // self.ppqn - - @property - def beat(self) -> int: - """The number of beats passed since the initial time.""" - return self.tick // self.ppqn - - @property - def current_bar(self) -> int: - """The number of bars passed since the initial time.""" - return self.current_beat // self.beat_per_bar - - @property - def bar(self) -> int: - """The number of bars passed since the initial time.""" - return self.current_beat // self.beat_per_bar - - @property - def phase(self) -> int: - """The phase of the current beat in ticks.""" - return self.tick % self.ppqn - - @property - def tick_shift(self) -> int: - """The tick shift in the current context. - - This is useful for simulating sleeps without blocking. - - If the real-time clock tick needs to be shifted, - assign to the `c.tick` property instead. - - """ - return tick_shift.get() - - @tick_shift.setter - def tick_shift(self, n_ticks: int): - tick_shift.set(n_ticks) - - # ---------------------------------------------------------------------- # - # Clock methods - - # ---------------------------------------------------------------------------------------- - # Link related functions - - def _get_new_linktime(self, new_time: dict): - """Calculate a new mean Linktime from Link""" - info = self._capture_link_info() - self._linktime.update( - { - "tempo": info["tempo"], - "beats": (self._linktime["beats"] + info["beats"]), - "phase": (self._linktime["phase"] + info["phase"]), - } - ) - - def link(self): - """ - Synchronise Sardine with Ableton Link. This method will call a new - instance of link through the LinkPython package (pybind11). As soon - as this instance is created, Sardine will instantly try to lock on - the new timegrid and share tempo with Link peers. 
- - NOTE: the Ableton Link mechanism is currently unstable and should be - used for test purposes only. You might end up loosing some events that - are ready for scheduling in your swimming functions! - """ - import link - - self._link = link.Link(self.bpm) - self._link.enabled = True - self._link.startStopSyncEnabled = True - - # We need to capture a first snapshot of time outside of the main - # mechanism in order to start calculations somewhere... - i = self._capture_link_info() - self._phase_snapshot = int( - abs( - round( - self._scale( - i["phase"], - (0, self.beat_per_bar), - (0, self.ppqn * self.beat_per_bar), - ) - ) - % self.ppqn - ) - ) - - print("[red bold]Joining Link Session: [/red bold]", end="") - - def unlink(self): - """ - Close connexion to Ableton Link by deleting the object. Sardine will - continue as if time never stopped ticking from the moment the clock - was disconnected from Link Clock (ticks will continue to increase, as - well as beat number). Tempo will not be updated to fall back to initial - value. - """ - del self._link - self._link = None - print("[red bold]Broke away from Link Session[/red bold]") - - def link_stop(self): - """ - Ableton Link Method to stop the timeline - """ - self._link.enabled = False - - def _capture_link_info(self): - """Capture information about the current state of the Ableton Link - session. This internal method will try to gather high-res temporal - informations for later processing. - - Returns: - dict: a dictionnary containing temporal information about the Link - session. 
- """ - if self._link: - s = self._link.captureSessionState() - link_time = self._link.clock().micros() - tempo_str = s.tempo() - beats_str = s.beatAtTime(link_time, self.beat_per_bar) - playing_str = str(s.isPlaying()) - phase = s.phaseAtTime(link_time, self.beat_per_bar) - return { - "tempo": tempo_str, - "beats": beats_str, - "playing": playing_str, - "phase": phase, - } - - def _link_log(self): - """Print state of current Ableton Link session on stdout.""" - i = self._capture_link_info() - print( - f'tempo {i["tempo"]} | playing {i["playing"]} | beats {i["beats"]} | phase {i["phase"]}' - ) - - def _scale(self, x: Union[int, float], old: tuple[int, int], new: tuple[int, int]): - return (x - old[0]) * (new[1] - new[0]) / (old[1] - old[0]) + new[0] - - def _link_phase_to_ppqn(self, captured_info: dict): - """Convert Ableton Link phase (0 to quantum, aka number of beats) - to Sardine phase (based on ticks and pulses per quarter notes). - The conversion is done using the internal _scale subfunction. 
- - Returns: - int: current phase (0 - self.ppqn) - """ - new_phase = ( - round( - self._scale( - captured_info["phase"], - (0, self.beat_per_bar), - (0, self.ppqn * self.beat_per_bar), - ) - ) - % self.ppqn - ) - - # Whatever happens, we need to move forward - if self._phase_snapshot == new_phase: - new_phase += 1 - # But we can't tolerate phase discontinuities - if new_phase == self._phase_snapshot + 2: - new_phase -= 1 - - self._phase_snapshot = new_phase - return int(abs(self._phase_snapshot)) - - def _link_beat_to_sardine_beat(self, captured_info: dict): - """Convert Ableton Link beats to valid Sardine beat""" - return int(captured_info["beats"]) - - def _link_time_to_ticks(self, captured_info: dict): - """Convert Ableton Link time to ticks, used by _increment_clock""" - phase = int(self._link_phase_to_ppqn(captured_info)) - beat = int(captured_info["beats"]) * (self.ppqn) - return beat + phase - - def get_beat_ticks(self, n_beats: Union[int, float], *, sync: bool = True) -> int: - """Determines the number of ticks to wait for N beats to pass. - - :param n_beats: The number of beats to wait for. - :param sync: - If True, the beat interval will be synchronized - to the start of the clock, returning an adjusted number - of ticks to match that interval. - If False, no synchronization is done and the returned - number of ticks will always be the number of beats multiplied - by the clock `ppqn`. - :returns: The number of ticks needed to wait. - - """ - interval = int(self.ppqn * n_beats) - if interval <= 0: - return 0 - elif not sync: - return interval - - return interval - self.tick % interval - - def get_bar_ticks(self, n_bars: Union[int, float], *, sync: bool = True) -> int: - """Determines the number of ticks to wait for N bars to pass. - - :param n_bars: The number of bars to wait for. - :param sync: - If True, the ticks calculated for the first bar - is reduced to synchronize with the clock. - :returns: The number of ticks needed to wait. 
- - """ - interval = int(self.ppqn * self.beat_per_bar * n_bars) - if interval <= 0: - return 0 - elif not sync: - return interval - - return interval - self.tick % interval - - @contextlib.contextmanager - def _scoped_tick_shift(self, n_ticks: int): - """Returns a context manager that adds `n_ticks` ticks to the clock - in the current context. - - After the context manager is exited, the tick shift is restored - to its original value. - - """ - token = tick_shift.set(tick_shift.get() + n_ticks) - try: - yield - finally: - tick_shift.reset(token) - - def _get_tick_duration(self) -> float: - """Determines the numbers of seconds the next tick will take. - - Only required when clock is running in active mode. - - """ - accel_mult = 1 - self.accel / 100 - nudge = self._nudge - self._nudge = 0 - interval = 60 / self.bpm / self.ppqn * accel_mult - result = (interval - self._delta) + nudge - return result if result >= 0 else 0.0 - - def _increment_clock(self, temporal_information: Optional[dict]): - """ - This method is in charge of increment the clock (moving forward - in time). In normal MIDI Clock Mode, this is as simple as - ticking forward (+1) and updating handles so they notice that - change. - - If Link is activated, temporal information must be received in - order to pinpoint the actual point of Link in time. This way, - Sardine can move time in accord with that reference point, while - trying to preserve its internal logic based on pulses per quarter - notes. 
- """ - if temporal_information: - self._current_tick = self._link_time_to_ticks(temporal_information) - else: - self._current_tick += 1 - self._update_handles() - - def _reload_runners(self): - for runner in self.runners.values(): - runner.reload() - - def _shift_handles(self, n_ticks: int): - for handle in self.tick_handles: - handle.when += n_ticks - - def _update_handles(self): - # this is implemented very similarly to asyncio.BaseEventLoop - while self.tick_handles: - handle = self.tick_handles[0] - if handle.cancelled(): - heapq.heappop(self.tick_handles) - elif self.tick >= handle.when: - handle.fut.set_result(None) - heapq.heappop(self.tick_handles) - else: - # all handles afterwards are either still waiting or cancelled - break - - # ---------------------------------------------------------------------- # - # Scheduler methods - - def schedule_func(self, func: MaybeCoroFunc, /, *args, **kwargs): - """Schedules the given function to be executed.""" - if not (inspect.isfunction(func) or inspect.ismethod(func)): - raise TypeError(f"func must be a function, not {type(func).__name__}") - - name = func.__name__ - runner = self.runners.get(name) - if runner is None: - runner = self.runners[name] = AsyncRunner( - clock=self, deferred=self.deferred_scheduling - ) - - runner.push(func, *args, **kwargs) - if runner.started(): - runner.reload() - runner.swim() - else: - runner.start() - - def remove(self, func: MaybeCoroFunc, /): - """Schedules the given function to stop execution.""" - runner = self.runners.get(func.__name__) - if runner is not None: - runner.stop() - - def wait_until(self, *, tick: int) -> TickHandle: - """Returns a TickHandle that waits for the clock to reach a certain tick.""" - handle = TickHandle(tick) - - # NOTE: we specifically don't want this influenced by `tick_shift` - if self._current_tick >= tick: - handle.fut.set_result(None) - else: - heapq.heappush(self.tick_handles, handle) - - return handle - - def wait_after(self, *, n_ticks: int) 
-> TickHandle: - """Returns a TickHandle that waits for the clock to pass N ticks from now.""" - return self.wait_until(tick=self.tick + n_ticks) - - # ---------------------------------------------------------------------- # - # Public methods - - def print_children(self): - """Print all children on clock""" - [print(child) for child in self.runners] - - def start(self, active=True): - """Start MIDI Clock""" - self.reset() - if not self.running: - self._midi.send(mido.Message("start")) - self.running = True - if active: - asyncio.create_task(self.run_active()) - else: - asyncio.create_task(self.run_passive()) - - def reset(self): - for runner in self.runners.values(): - runner.stop() - for handle in self.tick_handles: - handle.cancel() - - self.runners.clear() - self.tick_handles.clear() - - def stop(self) -> None: - """ - MIDI Stop message. - """ - # Kill every runner - - self.running = False - self._midi.send_stop() - self._midi.send(mido.Message("stop")) - self.reset() - - def log(self) -> None: - """ - Pretty print information about Clock timing on the console. - Used for debugging purposes. Not to be used when playing, - can be very verbose. Will overflow the console in no-time. 
- """ - cbib = (self.current_beat % self.beat_per_bar) + 1 - bar = self.current_bar - - color = "[bold yellow]" - first = ( - color + f"BPM: {self.bpm}, PHASE: {self.phase:02}, DELTA: {self._delta:2f}" - ) - second = color + f" || TICK: {self.tick} BAR:{bar} {cbib}/{self.beat_per_bar}" - print(first + second) - if self._link: - self._link_log() - - def note(self, sound: str, at: int = 0, **kwargs) -> SuperDirtSender: - return SuperDirtSender(self, sound, at, nudge=self._superdirt_nudge, **kwargs) - - def midinote( - self, - note: Union[int, str] = 60, - velocity: Union[int, str] = 100, - channel: Union[int, str] = 0, - dur: Union[int, float, str] = 48, - trig: Union[bool, int] = 1, - at: int = 0, - **kwargs, - ) -> MIDISender: - return MIDISender( - self, - self._midi, - at=at, - delay=dur, - note=note, - trig=trig, - velocity=velocity, - channel=channel, - nudge=self._midi_nudge, - **kwargs, - ) - - def oscmessage(self, connexion, address: str, at: int = 0, **kwargs) -> OSCSender: - return OSCSender( - clock=self, osc_client=connexion, address=address, at=at, **kwargs - ) - - def _sardine_beat_to_link_beat(self): - integer_part = self.beat + 1 - floating_part = self.phase / self.ppqn - return float(integer_part + floating_part) - - async def run_active(self): - """Main runner for the active mode (master)""" - self._current_tick, self._delta = 0, 0.0 - - while self.running: - begin = time.perf_counter() - duration = self._get_tick_duration() - if self._link: - await asyncio.sleep(0.0) - self._midi._process_events() - self._increment_clock(temporal_information=self._capture_link_info()) - else: - await asyncio.sleep(duration) - self._midi.send_clock() - self._increment_clock(temporal_information=None) - self._midi._process_events() - self._osc._send_clock_information(self) - - elapsed = time.perf_counter() - begin - if self._link: - self._delta = elapsed - else: - self._delta = elapsed - duration - - if self.debug: - self.log() - - def _estimate_bpm_from_delta(self, 
import asyncio
import math
import time
from typing import Union

from ..base import BaseClock

NUMBER = Union[int, float]

__all__ = ("InternalClock",)


class InternalClock(BaseClock):
    """Clock driven purely by the system's performance counter.

    Beat, bar and phase are all derived from the time elapsed since the
    clock's origin; no external synchronisation is involved.
    """

    def __init__(
        self,
        tempo: NUMBER = 120,
        bpb: int = 4,
    ):
        super().__init__()
        # NOTE: assigning `tempo` first also initialises `_beat_duration`
        # through the property setter below.
        self.tempo = tempo
        self.beats_per_bar = bpb
        self._internal_origin = 0.0

    #### GETTERS ############################################################

    @property
    def bar(self) -> int:
        """Number of whole bars elapsed since the origin."""
        return self.beat // self.beats_per_bar

    @property
    def beat(self) -> int:
        """Number of whole beats elapsed since the origin.

        FIXME: Internal clock beat will abruptly change with tempo
        """
        return int(self.shifted_time // self.beat_duration)

    @property
    def beat_duration(self) -> float:
        """Length of one beat in seconds (60 / tempo)."""
        return self._beat_duration

    @property
    def beats_per_bar(self) -> int:
        return self._beats_per_bar

    @property
    def internal_origin(self) -> float:
        return self._internal_origin

    @property
    def internal_time(self) -> float:
        return time.perf_counter()

    @property
    def phase(self) -> float:
        """Position inside the current beat, in seconds."""
        return self.shifted_time % self.beat_duration

    @property
    def tempo(self) -> float:
        return self._tempo

    #### SETTERS ############################################################

    @beats_per_bar.setter
    def beats_per_bar(self, bpb: int):
        self._beats_per_bar = bpb

    @internal_origin.setter
    def internal_origin(self, origin: float):
        self._internal_origin = origin

    @tempo.setter
    def tempo(self, new_tempo: NUMBER):
        """Set the tempo in BPM.

        Raises:
            ValueError: if the tempo falls outside the 1..999 range.
        """
        new_tempo = float(new_tempo)

        if not 1 <= new_tempo <= 999:
            raise ValueError("new tempo must be within 1 and 999")

        # Cache the beat duration so the getters never have to recompute it.
        self._tempo = new_tempo
        self._beat_duration = 60 / new_tempo

    ## METHODS ##############################################################

    async def sleep(self, duration: Union[float, int]) -> None:
        return await asyncio.sleep(duration)

    async def run(self):
        # The internal clock simply reads the system's time, so there is no
        # polling loop: record the origin once, then idle forever.
        self._internal_origin = self.internal_time
        await asyncio.sleep(math.inf)
from typing import Optional, Union

import link

from ..base import BaseClock, BaseThreadedLoopMixin

NUMBER = Union[int, float]

__all__ = ("LinkClock",)


class LinkClock(BaseThreadedLoopMixin, BaseClock):
    """Clock synchronised with an Ableton Link session.

    A background loop (from `BaseThreadedLoopMixin`) polls the Link session
    every `loop_interval` seconds and caches a snapshot of its state; all
    public properties read from that cache.
    """

    def __init__(
        self,
        tempo: NUMBER = 120,
        bpb: int = 4,
        loop_interval: float = 0.001,
    ):
        super().__init__(loop_interval=loop_interval)

        # Cached snapshot of the Link session, refreshed by loop().
        self._link: Optional[link.Link] = None
        self._beat: int = 0
        self._beat_duration: float = 0.0
        self._beats_per_bar: int = bpb
        self._internal_origin: float = 0.0
        self._internal_time: float = 0.0
        self._phase: float = 0.0
        self._playing: bool = False
        self._tempo: float = float(tempo)

    ## GETTERS ################################################

    @property
    def bar(self) -> int:
        return self.beat // self.beats_per_bar

    @property
    def beat(self) -> int:
        return self._beat + int(self.beat_shift)

    @property
    def beat_duration(self) -> float:
        return self._beat_duration

    @property
    def beat_shift(self) -> float:
        """A shorthand for time shift expressed in number of beats."""
        return self.env.time.shift / self.beat_duration

    @property
    def beats_per_bar(self) -> int:
        return self._beats_per_bar

    @property
    def internal_origin(self) -> float:
        return self._internal_origin

    @property
    def internal_time(self) -> float:
        return self._internal_time

    @property
    def phase(self) -> float:
        return (self._phase + self.beat_shift) % self.beat_duration

    @property
    def tempo(self) -> float:
        return self._tempo

    ## SETTERS ##############################################################

    @beats_per_bar.setter
    def beats_per_bar(self, bpb: int):
        self._beats_per_bar = bpb

    @internal_origin.setter
    def internal_origin(self, origin: float):
        self._internal_origin = origin

    @tempo.setter
    def tempo(self, new_tempo: float) -> None:
        # Tempo is owned by the Link session: push the change there and let
        # the next _capture_link_info() pull it back into the cache.
        # NOTE(review): a no-op while the loop has not started (_link is None).
        if self._link is not None:
            session = self._link.captureSessionState()
            session.setTempo(new_tempo, self.beats_per_bar)
            self._link.commitSessionState(session)

    ## METHODS ##############################################################

    def _capture_link_info(self):
        """Snapshot the Link session state into the cached attributes."""
        session: link.SessionState = self._link.captureSessionState()
        link_time: int = self._link.clock().micros()
        beat: float = session.beatAtTime(link_time, self.beats_per_bar)
        phase: float = session.phaseAtTime(link_time, self.beats_per_bar)
        playing: bool = session.isPlaying()
        tempo: float = session.tempo()

        # Link's clock is expressed in microseconds.
        self._internal_time = link_time / 1_000_000
        self._beat = int(beat)
        self._beat_duration = 60 / tempo
        # Sardine phase is typically defined from 0.0 to the beat duration.
        # Conversions are needed for the phase coming from the LinkClock.
        self._phase = phase % 1 * self.beat_duration
        self._playing = playing
        self._tempo = tempo

    def before_loop(self):
        self._link = link.Link(self._tempo)
        self._link.enabled = True
        self._link.startStopSyncEnabled = True

        # Set the origin at the start
        self._capture_link_info()
        self._internal_origin = self.internal_time

    def loop(self):
        self._capture_link_info()

    def after_loop(self):
        self._link = None
import contextlib
import contextvars

from ..base import BaseHandler

__all__ = ("Time",)

shift = contextvars.ContextVar("shift", default=0.0)
"""
This specifies the amount of time to offset in the current context.
Usually this is updated within the context of scheduled functions
to simulate sleeping without actually blocking the function. Behavior is
undefined if time is shifted in the global context.
"""


class Time(BaseHandler):
    """Contains the origin of a FishBowl's time.

    Any new clocks must continue from this origin when they are running,
    and must update the origin when they are paused or stopped.
    """

    def __init__(
        self,
        origin: float = 0.0,
    ):
        super().__init__()
        self._origin = origin

    def __repr__(self) -> str:
        return "{}({})".format(
            type(self).__name__,
            " ".join(f"{attr}={getattr(self, attr)!r}" for attr in ("origin",)),
        )

    @property
    def origin(self) -> float:
        """The origin of the fish bowl's time.

        When this property is updated, an `origin_update` event
        will be dispatched with two arguments, the old and the new
        origin.
        """
        return self._origin

    @origin.setter
    def origin(self, new_origin: float):
        old_origin = self._origin
        self._origin = new_origin

        self.env.dispatch("origin_update", old_origin, new_origin)

    @property
    def shift(self) -> float:
        """The time shift in the current context.

        This is useful for simulating sleeps without blocking.
        """
        return shift.get()

    @shift.setter
    def shift(self, seconds: float):
        # FIX: was annotated `int`; shifts are fractional seconds, matching
        # the ContextVar default of 0.0 and scoped_shift()'s signature.
        shift.set(seconds)

    @contextlib.contextmanager
    def scoped_shift(self, seconds: float):
        """Returns a context manager that adds `seconds` to the clock.

        After the context manager is exited, the time shift is restored
        to its previous value.
        """
        token = shift.set(shift.get() + seconds)
        try:
            yield
        finally:
            shift.reset(token)

    def reset(self):
        """Resets the time origin back to 0.

        NOTE: this writes `_origin` directly, bypassing the `origin`
        setter, so no `origin_update` event is dispatched.
        """
        self._origin = 0.0
import asyncio

import rich

from .loop import *
from .mixin import *
from .policy import *
from .sansio import *

__all__ = ("install_policy", "new_event_loop")


def _install_precision_proactor() -> bool:
    """Install the precision proactor event loop (Windows only)."""
    if PrecisionProactorEventLoop is None:
        rich.print("[yellow]Skipping precision event loop on non-Windows system")
        return False

    asyncio.set_event_loop_policy(PrecisionProactorEventLoopPolicy())
    rich.print("[yellow]Installed precision proactor event loop")
    return True


def _install_precision_sansio() -> bool:
    """Install the Sans I/O event loop (no networking/subprocess support)."""
    asyncio.set_event_loop_policy(PrecisionSansIOEventLoopPolicy())
    rich.print("[yellow]installed precision Sans I/O event loop")
    # FIX: the warning was missing the verb ("does not networking/...").
    rich.print("[bold red]WARNING: event loop does not support networking/subprocesses")
    return True


def _install_precision_selector() -> bool:
    """Install the precision selector event loop."""
    asyncio.set_event_loop_policy(PrecisionSelectorEventLoopPolicy())
    rich.print("[yellow]Installed precision selector event loop")
    return True


def _install_uvloop() -> bool:
    """Install uvloop if available; returns False when not installed."""
    try:
        import uvloop
    except ImportError:
        rich.print("[green]uvloop[/green] [yellow]is not installed")
        return False

    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    rich.print("[yellow]Installed uvloop event loop")
    return True


def install_policy():
    """Installs the best-available event loop policy into asyncio.

    This method must be called before any event loop is created, otherwise
    it will not affect those event loops.
    """
    methods = (
        # _install_precision_sansio,
        _install_uvloop,
        # _install_precision_proactor,
        # _install_precision_selector,
    )
    successful = False
    for func in methods:
        successful = func()
        if successful:
            break

    if not successful:
        rich.print(
            "[yellow]No custom event loop applied; rhythm accuracy may be impacted"
        )


def new_event_loop() -> asyncio.BaseEventLoop:
    """Creates the best-available event loop without permanently installing
    a new policy for asyncio.
    """
    last_policy = asyncio.get_event_loop_policy()
    install_policy()
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop_policy(last_policy)
    return loop
# --- sardine/event_loop/loop.py -----------------------------------------
import asyncio

from .mixin import PerfCounterMixin
from .sansio import SansIOEventLoop

__all__ = (
    "PrecisionProactorEventLoop",
    "PrecisionSansIOEventLoop",
    "PrecisionSelectorEventLoop",
)


if hasattr(asyncio, "ProactorEventLoop"):

    class PrecisionProactorEventLoop(PerfCounterMixin, asyncio.ProactorEventLoop):
        """Proactor event loop timed with time.perf_counter()."""

else:
    # asyncio.ProactorEventLoop only exists on Windows.
    PrecisionProactorEventLoop = None


class PrecisionSansIOEventLoop(PerfCounterMixin, SansIOEventLoop):
    """Sans I/O event loop timed with time.perf_counter()."""


class PrecisionSelectorEventLoop(PerfCounterMixin, asyncio.SelectorEventLoop):
    """Selector event loop timed with time.perf_counter()."""


# --- sardine/event_loop/mixin.py ----------------------------------------
import time

__all__ = ("PerfCounterMixin",)


class PerfCounterMixin:
    """Event-loop mixin that sources its time from time.perf_counter()."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._clock_resolution = time.get_clock_info("perf_counter").resolution

    def time(self) -> float:
        return time.perf_counter()


# --- sardine/event_loop/policy.py ---------------------------------------
import asyncio

from .loop import *

__all__ = (
    "PrecisionProactorEventLoopPolicy",
    "PrecisionSansIOEventLoopPolicy",
    "PrecisionSelectorEventLoopPolicy",
)


class PrecisionProactorEventLoopPolicy(asyncio.DefaultEventLoopPolicy):
    _loop_factory = PrecisionProactorEventLoop


class PrecisionSansIOEventLoopPolicy(asyncio.DefaultEventLoopPolicy):
    _loop_factory = PrecisionSansIOEventLoop


class PrecisionSelectorEventLoopPolicy(asyncio.DefaultEventLoopPolicy):
    _loop_factory = PrecisionSelectorEventLoop


# --- sardine/event_loop/sansio.py ---------------------------------------
import asyncio
import threading
from typing import Optional

__all__ = ("SansIOEventLoop", "SansSelector")


class SansSelector:
    """Selector stub that sleeps on a condition instead of polling I/O."""

    # NOTE(review): shared class-level list; always empty, so no events
    # are ever reported — intentional for a no-I/O loop.
    _event_list = []

    def __init__(self, wake_cond: threading.Condition):
        self._wake_cond = wake_cond

    def select(self, timeout: Optional[int]):
        timeout = timeout or 0.0
        with self._wake_cond:
            self._wake_cond.wait(timeout)
        return self._event_list


class SansIOEventLoop(asyncio.BaseEventLoop):
    """An event loop implementation with zero I/O support.

    This removes the potential overhead of waiting on I/O from selectors,
    replacing it with `time.sleep()`. Any native I/O APIs will **not** work
    when using this implementation.
    """

    def __init__(self) -> None:
        super().__init__()
        self._wake_cond = wake_cond = threading.Condition()
        self._selector = SansSelector(wake_cond)

    def _process_events(self, event_list):
        pass

    def _write_to_self(self):
        # Wake the selector so call_soon_threadsafe() works.
        with self._wake_cond:
            self._wake_cond.notify_all()
# --- sardine/fish_bowl.py (header) --------------------------------------
import asyncio
import collections
from typing import Hashable, Iterable, Optional, Protocol, Union

from exceptiongroup import BaseExceptionGroup

from .base import BaseClock, BaseHandler, BaseParser
from .clock import InternalClock, Time
from .handlers import SleepHandler
from .scheduler import Scheduler
from .sequences import Iterator, ListParser, Variables

__all__ = ("FishBowl",)


class HookProtocol(Hashable, Protocol):
    """A callable object that accepts an event and any number of arguments."""

    def __call__(self, event: str, *args):
        ...
+ + +class FishBowl: + """Contains all the components necessary to run the Sardine system.""" + + def __init__( + self, + clock: Optional[BaseClock] = None, + iterator: Optional[Iterator] = None, + parser: Optional[BaseParser] = None, + scheduler: Optional[Scheduler] = None, + sleeper: Optional[SleepHandler] = None, + time: Optional[Time] = None, + variables: Optional[Variables] = None, + ): + self.clock = clock or InternalClock() + self.iterators = iterator or Iterator() + self.parser = parser or ListParser() + self.scheduler = scheduler or Scheduler() + self.sleeper = sleeper or SleepHandler() + self.time = time or Time() + self.variables = variables or Variables() + + self._handlers: dict[BaseHandler, None] = {} + self._alive = asyncio.Event() + self._resumed = asyncio.Event() + + self._event_hooks: dict[ + Optional[str], dict[HookProtocol, None] + ] = collections.defaultdict(dict) + # Reverse mapping for easier removal of hooks + self._hook_events: dict[ + HookProtocol, dict[Optional[str], None] + ] = collections.defaultdict(dict) + + self.add_handler(self.clock) + self.add_handler(self.parser) + self.add_handler(self.scheduler) + self.add_handler(self.sleeper) + self.add_handler(self.time) + + @property + def handlers(self) -> list[BaseHandler]: + """A list of all handlers added to this fish bowl.""" + return list(self._handlers) + + ## DUNDER ################################################################# + + def __del__(self): + self.stop() + for handler in self.handlers: + self.remove_handler(handler) + + def __repr__(self) -> str: + running = self.is_running() + paused = self.is_paused() + status = ( + "playing" + if running and not paused + else "paused" + if running and paused + else "stopped" + ) + + return "<{} {} clock={!r}>".format( + type(self).__name__, + status, + self.clock, + ) + + ## TRANSPORT ############################################################## + + def pause(self) -> bool: + """Pauses the fish bowl. 
+ + This will emit a `pause` event unless the fish bowl does + not need to be paused, e.g. being paused once already or not + having started. + + Returns: + bool: True if the fish bowl was paused, False otherwise. + """ + allowed = self.is_running() and not self.is_paused() + if allowed: + self._resumed.clear() + self.dispatch("pause") + return allowed + + def resume(self) -> bool: + """Resumes the fish bowl. + + This will emit a `resume` event unless the fish bowl does + not need to be resumed, e.g. if the clock is not running + or has not been paused. + + Returns: + bool: True if the fish bowl was resumed, False otherwise. + """ + allowed = self.is_running() and self.is_paused() + if allowed: + self._resumed.set() + self.dispatch("resume") + return allowed + + def start(self) -> bool: + """Starts the fish bowl. + + This will emit a `start` event unless the fish bowl does + not need to be started, e.g. if the fish bowl has already started. + + If the fish bowl is started, paused, stopped, and started again, + handlers should treat it as if the fish bowl is no longer paused. + + Returns: + bool: True if the fish bowl was started, False otherwise. + """ + allowed = not self.is_running() + if allowed: + self._alive.set() + self._resumed.set() + self.dispatch("start") + return allowed + + def stop(self) -> bool: + """Stops the fish bowl. + + This will emit a `stop` event unless the fish bowl does + not need to be stopped, e.g. if the clock is not running. + + Returns: + bool: True if the fish bowl was stopped, False otherwise. 
+ """ + allowed = self.is_running() + if allowed: + self._alive.clear() + self.dispatch("stop") + return allowed + + def is_paused(self): + """Checks if the fish bowl is paused.""" + return not self._resumed.is_set() + + def is_running(self): + """Checks if the fish bowl is running.""" + return self._alive.is_set() + + ## SLEEPING MANAGEMENT ############################################################ + + async def sleep(self, duration: Union[int, float]): + """Sleeps for the given duration. + + This method is simply a shorthand for `self.sleeper.sleep(duration)`. + """ + return await self.sleeper.sleep(duration) + + # Hot-swap methods ############################################################ + + def swap_clock(self, clock: "BaseClock"): + """Hot-swap the current clock for a different clock. + + This method will perform the following procedure: + 1. Pause the fish bowl + 2. Remove the old clock's handler + 3. Replace with the current clock and add as handler + 4. Resume fish bowl + 5. Trigger a `clock_swap` event with one argument, the new clock instance + """ + self.pause() + self.remove_handler(self.clock) + self.clock = clock + self.add_handler(clock) + self.resume() + self.dispatch("clock_swap", clock) + + ## HANDLERS ############################################################ + + def add_handler(self, handler: "BaseHandler"): + """Adds a new handler to the fish bowl. + + If the handler has any child handlers, they will be + recursively added to the fish bowl as well. + + This method is idempotent; adding the handler more than once + will cause nothing to happen. However, handler objects cannot + be shared across different fish bowls. + + If an error occurs during the handler setup (including its children), + the handler will be removed before propagating the exception. + This means that `teardown()` is called even after an incomplete setup. + + Args: + handler (BaseHandler): The handler being added. 
+ + Raises: + ValueError: + The handler is either already added to a different fish bowl + or the handler's parent was not yet added to this fish bowl. + """ + if handler.env is not None: + if handler.env is self: + return + raise ValueError(f"{handler!r} was already added to {handler.env!r}") + elif handler.parent is not None and handler.parent.env is not self: + if handler.parent.env is None: + raise ValueError( + f"The parent {handler.parent!r} must be added to the fish bowl" + ) + else: + raise ValueError( + f"The parent {handler.parent!r} was already added to {handler.env!r}" + ) + + # It may be possible that the user set `env` to None, but + # given that `register_hook()` is idempotent, it's probably + # fine to call `BaseHandler.setup()` again + + handler._env = self # pylint: disable=protected-access + self._handlers[handler] = None + try: + handler.setup() + for child in handler.children: + self.add_handler(child) + except BaseException as e: + self.remove_handler(handler) + raise e + + def remove_handler(self, handler: "BaseHandler"): + """Removes an existing handler from the fish bowl. + + If the handler has any child handlers, they will be + recursively removed from the fish bowl as well. + + After the handler has been removed, it can be re-used in new fish bowls. + + This method is idempotent; removing the handler when + it has already been removed will cause nothing to happen. + + If an error occurs during the handler teardown (including its children), + its children and hooks will still be removed. At the end, all exceptions + are grouped into a `BaseExceptionGroup` before being raised. + + Args: + handler (BaseHandler): The handler to remove from the fish bowl. + + Raises: + ValueError: + One of the handler's parents has locked its children from + being removed. 
+ """ + if handler not in self._handlers: + return + elif handler.locked: + raise ValueError(f"{handler!r} has been locked by its parent") + + exceptions: list[BaseException] = [] + + was_locked = handler.lock_children + handler.lock_children = None + + for child in handler.children: + try: + self.remove_handler(child) + except BaseException as e: # pylint: disable=invalid-name,broad-except + exceptions.append(e) + + handler.lock_children = was_locked + + try: + handler.teardown() + except BaseException as e: # pylint: disable=invalid-name,broad-except + exceptions.append(e) + + handler._env = None # pylint: disable=protected-access + del self._handlers[handler] + + event_set = self._hook_events.get(handler) + if event_set is not None: + for event in tuple(event_set): + self.unregister_hook(event, handler) + + if exceptions: + raise BaseExceptionGroup( + f"Errors raised while removing {handler!r}", exceptions + ) + + # Hook management + + def register_hook(self, event: Optional[str], hook: HookProtocol): + """Registers a hook for a given event. + + Whenever the fish bowl dispatches an event, the hooks associated + with that event will be called in an arbitrary order. + + Global hooks can also be registered by passing `None` as the event. + These hooks will be called on every event that is dispatched. + If a hook is registered both globally and for a specific event, + the hook will always be called once regardless. + + This method is idempotent; registering the same hook for + the same event will cause nothing to happen. + + Args: + event (Optional[str]): + The event name under which the hook will be registered. + If set to `None`, this will be a global hook. + hook (HookProtocol): + The hook to call whenever the event is triggered. 
+ """ + hook_dict = self._event_hooks[event] + if hook in hook_dict: + return + + hook_dict[hook] = None + self._hook_events[hook][event] = None + + def unregister_hook(self, event: Optional[str], hook: HookProtocol): + """Unregisters a hook for a specific event. + + Global hooks can be removed by passing `None` as the event. + + This method is idempotent; unregistering a hook that does not + exist for a given event will cause nothing to happen. + + Args: + event (Optional[str]): The event to remove the hook from. + hook (HookProtocol): The hook being removed. + """ + hook_dict = self._event_hooks.get(event) + if hook_dict is not None: + hook_dict.pop(hook, None) + if not hook_dict: + del self._event_hooks[event] + + event_dict = self._hook_events.get(hook) + if event_dict is not None: + event_dict.pop(event, None) + if not event_dict: + del self._hook_events[hook] + + def _run_hooks(self, hooks: Iterable[HookProtocol], event: str, *args): + # TODO add hook ratelimiting + exceptions: list[BaseException] = [] + for func in hooks: + try: + func(event, *args) + + except Exception as e: # pylint: disable=invalid-name,broad-except + exceptions.append(e) + except BaseException as e: # pylint: disable=invalid-name,broad-except + exceptions.append(e) + break + + if exceptions: + raise BaseExceptionGroup( + f"Errors raised while running hooks for {event}", exceptions + ) + + def dispatch(self, event: str, *args): + """Dispatches an event to its associated hooks with the given arguments. + + Args: + event (str): The name of the event being dispatched. + *args: The arguments to pass to the event. 
+ """ + empty_dict: dict[HookProtocol, None] = {} + local_hooks = self._event_hooks.get(event, empty_dict) + global_hooks = self._event_hooks.get(None, empty_dict) + + all_hooks = local_hooks | global_hooks + self._run_hooks(all_hooks, event, *args) diff --git a/sardine/handlers/__init__.py b/sardine/handlers/__init__.py new file mode 100644 index 00000000..5d96b5c9 --- /dev/null +++ b/sardine/handlers/__init__.py @@ -0,0 +1,10 @@ +from .midi import * +from .midi_in import * +from .missile import * +from .osc import * +from .osc_in import * +from .osc_loop import * +from .player import * +from .sender import * +from .sleep_handler import * +from .superdirt import * diff --git a/sardine/handlers/midi.py b/sardine/handlers/midi.py new file mode 100644 index 00000000..55fd5be6 --- /dev/null +++ b/sardine/handlers/midi.py @@ -0,0 +1,276 @@ +import asyncio +import sys +from typing import Optional, Union + +import mido +from rich import print + +from ..utils import alias_param +from .sender import Number, NumericElement, Sender + +__all__ = ("MidiHandler",) + + +class MidiHandler(Sender): + + """ + MidiHandler: a class capable of reacting to most MIDI Messages. + """ + + def __init__(self, port_name: str = "Sardine", nudge: float = 0.0): + super().__init__() + self.active_notes: dict[tuple[int, int], asyncio.Task] = {} + + # Setting up the MIDI Connexion + self._available_ports = mido.get_output_names() + self._port_name = port_name + self._midi = None + + # For MacOS/Linux + if sys.platform not in "win32": + if self._port_name in ["Sardine", "internal"]: + self._midi = mido.open_output("Sardine", virtual=True) + else: + try: + self._midi = mido.open_output(self._port_name, virtual=False) + except Exception as e: # TODO what error are we trying to catch here? 
+ self._midi = mido.open_output( + self._available_ports[0], virtual=False + ) + self._port_name = str(self._available_ports[0]) + + # For W10/W11 + else: + try: + self._midi = mido.open_output(self._available_ports[0]) + self._port_name = str(self._available_ports[0]) + except Exception as err: + print(f"[red]Failed to open a MIDI Connexion: {err}") + + # Setting up the handler + self._nudge = nudge + self.events = { + "start": self._start, + "continue": self._continue, + "stop": self._stop, + "reset": self._reset, + "clock": self._clock, + "note_on": self._note_on, + "note_off": self._note_off, + "aftertouch": self._aftertouch, + "polytouch": self._polytouch, + "control_change": self._control_change, + "program_change": self._program_change, + "sysex": self._sysex, + "pitchwheel": self._pitch_wheel, + } + + def __repr__(self) -> str: + return f"<{type(self).__name__} port={self._port_name!r} nudge={self._nudge}>" + + def setup(self): + for event in self.events: + self.register(event) + + def hook(self, event: str, *args): + func = self.events[event] + func(*args) + + def _start(self, *args) -> None: + self._midi.send(mido.Message("start")) + + def _continue(self, *args) -> None: + self._midi.send(mido.Message("continue")) + + def _stop(self, *args) -> None: + self._midi.send(mido.Message("stop")) + + def _reset(self, *args) -> None: + self._midi.send(mido.Message("reset")) + + def _clock(self, *args) -> None: + self._midi.send(mido.Message("clock")) + + def _note_on(self, channel: int, note: int, velocity: int) -> None: + self._midi.send( + mido.Message("note_on", channel=channel, note=note, velocity=velocity) + ) + + def _note_off(self, channel: int, note: int, velocity: int) -> None: + self._midi.send( + mido.Message("note_off", channel=channel, note=note, velocity=velocity) + ) + + def _polytouch(self, channel: int, note: int, value: int) -> None: + self._midi.send( + mido.Message("polytouch", channel=channel, note=note, value=value) + ) + + def 
 _aftertouch(self, channel: int, value: int) -> None: + self._midi.send(mido.Message("aftertouch", channel=channel, value=value)) + + def _control_change(self, channel: int, control: int, value: int) -> None: + self._midi.send( + mido.Message( + "control_change", channel=channel, control=control, value=value + ) + ) + + def _program_change(self, program: int, channel: int) -> None: + self._midi.send( + mido.Message("program_change", program=program, channel=channel) + ) + + def _sysex(self, data: bytearray, time: int = 0) -> None: + self._midi.send(mido.Message("sysex", data=data, time=time)) + + def _pitch_wheel(self, pitch: int, channel: int) -> None: + self._midi.send(mido.Message("pitchwheel", pitch=pitch, channel=channel)) + + async def send_off( + self, note: int, channel: int, velocity: int, delay: Union[int, float] + ): + await self.env.sleep(delay) + self._midi.send( + mido.Message("note_off", note=note, channel=channel, velocity=velocity) + ) + self.active_notes.pop((note, channel), None) + + def all_notes_off(self): + """ + Panic button for MIDI notes on every channel. Is there a message for this? + """ + for note in range(0, 128): + for channel in range(0, 16): + self._midi.send( + mido.Message("note_off", note=note, velocity=0, channel=channel) + ) + + def send_midi_note( + self, note: int, channel: int, velocity: int, duration: float + ) -> None: + """ + Function in charge of handling MIDI note sending. This also includes various + corner cases and typical MIDI note management such as: + - handling duration by cleverly combining 'note_on' and 'note_off' events. + - retriggering: turning a note off and on again if the note is repeated before + the end of its previously defined duration. 
+ """ + + key = (note, channel) + note_task = self.active_notes.get(key) + + if note_task is not None and not note_task.done(): + # Brute force solution (temporary fix) + self._note_off(channel=channel, note=note, velocity=0) + note_task.cancel() + self.active_notes.pop(key, None) + + self._midi.send( + mido.Message( + "note_on", + note=int(note), + channel=int(channel), + velocity=int(velocity), + ) + ) + self.active_notes[key] = asyncio.create_task( + self.send_off( + note=note, delay=duration - 0.02, velocity=velocity, channel=channel + ) + ) + + @alias_param(name="channel", alias="chan") + @alias_param(name="duration", alias="dur") + @alias_param(name="velocity", alias="vel") + @alias_param(name="iterator", alias="i") + @alias_param(name="divisor", alias="d") + @alias_param(name="rate", alias="r") + def send( + self, + note: Optional[NumericElement] = 60, + velocity: NumericElement = 100, + channel: NumericElement = 0, + duration: NumericElement = 1, + iterator: Number = 0, + divisor: NumericElement = 1, + rate: NumericElement = 1, + ) -> None: + """ + This method is responsible for preparing the pattern message before sending it + to the output. This method serves as a template for all other similar 'senders' + around. It can handle both monophonic and polyphonic messages generated by the + parser. Any chord will be reduced to a list of dictionaries, transformed again + into a single MIDI message. 
+ """ + + if note is None: + return + + pattern = { + "note": note, + "velocity": velocity, + "channel": channel, + "duration": duration, + } + for message in self.pattern_reduce(pattern, iterator, divisor, rate): + if message["note"] is None: + continue + for k in ("note", "velocity", "channel"): + message[k] = int(message[k]) + self.send_midi_note(**message) + + @alias_param(name="value", alias="val") + @alias_param(name="control", alias="ctrl") + @alias_param(name="channel", alias="chan") + @alias_param(name="iterator", alias="i") + @alias_param(name="divisor", alias="d") + @alias_param(name="rate", alias="r") + def send_control( + self, + control: Optional[NumericElement] = 0, + channel: NumericElement = 0, + value: NumericElement = 60, + iterator: Number = 0, + divisor: NumericElement = 1, + rate: NumericElement = 1, + ) -> None: + """ + Variant of the 'send' function specialized in sending control changes. See the + 'send' method for more information. + """ + + if control is None: + return + + pattern = {"control": control, "channel": channel, "value": value} + for message in self.pattern_reduce(pattern, iterator, divisor, rate): + if message["control"] is None: + continue + for k, v in message.items(): + message[k] = int(v) + self._control_change(**message) + + @alias_param(name="number", alias="num") + @alias_param(name="channel", alias="chan") + @alias_param(name="iterator", alias="i") + @alias_param(name="divisor", alias="d") + @alias_param(name="rate", alias="r") + def send_program( + self, + channel: Optional[NumericElement], + number: NumericElement = 60, + iterator: Number = 0, + divisor: NumericElement = 1, + rate: NumericElement = 1, + ) -> None: + if channel is None: + return + + pattern = {"channel": channel, "program": number} + for message in self.pattern_reduce(pattern, iterator, divisor, rate): + if message["channel"] is None: + continue + for k, v in message.items(): + message[k] = int(v) + self._program_change(**message) diff --git 
a/sardine/io/MidiListener.py b/sardine/handlers/midi_in.py similarity index 68% rename from sardine/io/MidiListener.py rename to sardine/handlers/midi_in.py index 0089a7f8..31f19a5f 100644 --- a/sardine/io/MidiListener.py +++ b/sardine/handlers/midi_in.py @@ -1,12 +1,14 @@ -import mido -from mido import Message, open_input, get_input_names, parse_string_stream -from rich import print -from typing import Optional, Union from collections import deque from dataclasses import dataclass +from typing import Optional, Union + +import mido +from mido import Message, get_input_names, open_input, parse_string_stream +from rich import print +from ..base.handler import BaseHandler -__all__ = ("MidiListener", "ClockListener", "ControlTarget", "NoteTarget") +__all__ = ("MidiInHandler", "ControlTarget", "NoteTarget") @dataclass @@ -20,17 +22,19 @@ class NoteTarget: channel: int -class MidiListener: - """MIDI-In Listener""" +class MidiInHandler(BaseHandler): + """ + MIDI-In Listener: listen to incoming MIDI events from a selected port. + Useful for mapping controllers to control / interact with Sardine. 
+ """ def __init__( self, target: Union[ControlTarget, NoteTarget, None] = None, port: Optional[str] = None, ): - + super().__init__() self.target = target - self.queue = deque(maxlen=20) self._last_item: Optional[Message] = None self._last_value = 0 @@ -39,7 +43,6 @@ def __init__( try: self._input = open_input(port) self._input.callback = self._callback - print(f"MidiListener: listening on port {port}") except Exception: raise OSError(f"Couldn't listen on port {port}") else: @@ -47,13 +50,12 @@ def __init__( self._input = open_input() self._input.callback = self._callback listened_port = mido.get_input_names()[0] - print(f"MidiListener: listening on port {listened_port}") except Exception: raise OSError(f"Couldn't listen on port {port}") def __str__(self): """String representation of the MIDI Listener""" - return f"" + return f"" def _callback(self, message): """Callback for MidiListener Port""" @@ -84,7 +86,9 @@ def _get_note(self, channel: int) -> None: self._last_item = self._last_item def _extract_value(self, message: Union[mido.Message, None]) -> Union[Message, int]: - """Given a mido.Message, extract needed value based on message type""" + """ + Given a mido.Message, extract needed value based on message type + """ if message is None: return 0 @@ -120,33 +124,3 @@ def inspect_queue(self): def kill(self): """Close the MIDIListener""" self._input.close() - - -class ClockListener: - def __init__(self, port: Optional[str] = None): - print("Creating MIDIClock Listener") - self.queue = deque(maxlen=180) - - if port: - try: - self._input = open_input(port) - print(f"MidiListener: listening on port {port}") - except Exception: - raise OSError(f"Couldn't listen on port {port}") - else: - try: - self._input = open_input() - listened_port = mido.get_input_names()[0] - print(f"MidiListener: listening on port {listened_port}") - except Exception: - raise OSError(f"Couldn't listen on port {port}") - - def wait_for_tick(self): - """Wait for a tick coming from the MIDI 
Port""" - while True: - msg = self._input.poll() - if msg is None: - continue - elif msg.type == "clock": - break - return msg diff --git a/sardine/handlers/missile.py b/sardine/handlers/missile.py new file mode 100644 index 00000000..5487a027 --- /dev/null +++ b/sardine/handlers/missile.py @@ -0,0 +1,40 @@ +import asyncio +from typing import Optional, Union + +from ..base import BaseHandler + +__all__ = ("MissileMode",) + + +class MissileMode(BaseHandler): + """Maximize the current thread's wake time with a CPU-intensive task.""" + + def __init__(self, *, burn_rate: Union[float, int] = 1000): + super().__init__() + self.burn_interval = 1 / burn_rate + self._running = False + self._run_task: Optional[asyncio.Task] = None + + def is_running(self) -> bool: + return self._run_task is not None and not self._run_task.done() + + async def run(self): + self._running = True + while self._running: + await asyncio.sleep(self.burn_interval) + + # Handler hooks + + def setup(self): + for event in ("start", "pause", "resume", "stop"): + self.register(event) + + def teardown(self): + if self.is_running(): + self._run_task.cancel() + + def hook(self, event: str, *args): + if event in ("start", "resume") and not self.is_running(): + self._run_task = asyncio.create_task(self.run()) + elif event in ("stop", "pause") and self.is_running(): + self._run_task.cancel() diff --git a/sardine/handlers/osc.py b/sardine/handlers/osc.py new file mode 100644 index 00000000..e352ab5e --- /dev/null +++ b/sardine/handlers/osc.py @@ -0,0 +1,75 @@ +import time +from itertools import chain +from typing import Optional + +from osc4py3 import oscbuildparse +from osc4py3.as_eventloop import * +from osc4py3.oscmethod import * + +from ..utils import alias_param +from .osc_loop import OSCLoop +from .sender import Number, NumericElement, Sender, StringElement + +__all__ = ("OSCHandler",) + + +class OSCHandler(Sender): + def __init__( + self, + loop: OSCLoop, + ip: str = "127.0.0.1", + port: int = 23456, + 
name: str = "OSCSender", + ahead_amount: float = 0.0, + ): + super().__init__() + self.loop = loop + loop.add_child(self) + + # Setting up OSC Connexion + self._ip, self._port, self._name = (ip, port, name) + self._ahead_amount = ahead_amount + self.client = osc_udp_client(address=self._ip, port=self._port, name=self._name) + self._events = {"send": self._send} + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self._name} ip={self._ip!r} port={self._port}>" + + def setup(self): + for event in self._events: + self.env.register_hook(event, self) + + def hook(self, event: str, *args): + func = self._events[event] + func(*args) + + def _send(self, address: str, message: list) -> None: + msg = oscbuildparse.OSCMessage(address, None, message) + bun = oscbuildparse.OSCBundle( + oscbuildparse.unixtime2timetag(time.time() + self._ahead_amount), + [msg], + ) + osc_send(bun, self._name) + + @alias_param(name="iterator", alias="i") + @alias_param(name="divisor", alias="d") + @alias_param(name="rate", alias="r") + def send( + self, + address: Optional[StringElement], + iterator: Number = 0, + divisor: NumericElement = 1, + rate: NumericElement = 1, + **pattern: NumericElement, + ) -> None: + + if address is None: + return + + pattern["address"] = address + for message in self.pattern_reduce(pattern, iterator, divisor, rate): + if message["address"] is None: + continue + address = message.pop("address") + serialized = list(chain(*sorted(message.items()))) + self._send(f"/{address}", serialized) diff --git a/sardine/handlers/osc_in.py b/sardine/handlers/osc_in.py new file mode 100644 index 00000000..839741c0 --- /dev/null +++ b/sardine/handlers/osc_in.py @@ -0,0 +1,109 @@ +from typing import Any, Callable, Optional, Union + +from osc4py3.as_eventloop import * +from osc4py3.oscchannel import TransportChannel, get_channel +from osc4py3.oscmethod import * +from rich import print + +from ..base.handler import BaseHandler +from .osc_loop import OSCLoop + +__all__ = 
("OSCInHandler",) + + +def flatten(l): + if isinstance(l, (list, tuple)): + if len(l) > 1: + return [l[0]] + flatten(l[1:]) + else: + return l[0] + else: + return [l] + + +class OSCInHandler(BaseHandler): + def __init__( + self, + loop: OSCLoop, + ip: str = "127.0.0.1", + port: int = 11223, + name: str = "OSCIn", + ): + super().__init__() + self.loop = loop + loop.add_child(self) + + self._ip, self._port, self._name = ip, port, name + self._watched_values = {} + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self._name} ip={self._ip!r} port={self._port}>" + + # Handler methods + + def setup(self): + osc_udp_server(self._ip, self._port, self._name) + + def teardown(self): + channel: Optional[TransportChannel] = get_channel(self._name) + if channel is not None: + channel.terminate() + + # Interface + + def _generic_store(self, address) -> None: + """Generic storage function to attach to a given address""" + + def generic_value_tracker(*args, **kwargs): + """Generic value tracker to be attached to an address""" + self._watched_values[address] = {"args": flatten(args), "kwargs": kwargs} + return (args, kwargs) + + osc_method(address, generic_value_tracker, argscheme=OSCARG_DATA) + + def watch(self, address: str): + """ + Watch the value of a given OSC address. Will be recorded in memory + in the self._watched_values dictionary accessible through the get() + method + """ + print(f"[yellow]Watching address [red]{address}[/red].[/yellow]") + self._generic_store(address) + + def attach(self, address: str, function: Callable, watch: bool = False): + """ + Attach a callback to a given address. You can also toggle the watch + boolean value to tell if the value should be tracked by the receiver. + It allows returning values from the callback to be retrieved later in + through the get(address) method. 
+ """ + print( + f"[yellow]Attaching function [red]{function.__name__}[/red] to address [red]{address}[/red][/yellow]" + ) + osc_method(address, function) + if watch: + self.watch(address) + + def remote(self, address: str): + """ + Remote for controlling Sardine from an external client by talking directly to + the fish_bowl dispatch system. If the address matches an internal function de- + clared by some handler, the dispatch function will be called and *args will be + forwarded as well. + + address: address matching to a dispatch function (like 'pause', 'stop', etc..) + """ + print("Attaching address to matching incoming message") + + def event_dispatcher(address, *args) -> None: + print(f"Event Name: {address}") + self.env.dispatch(address, *args) + + osc_method(address, event_dispatcher, argscheme=OSCARG_DATA) + + def get(self, address: str) -> Union[Any, None]: + """Get a watched value. Return None if not found""" + try: + return self._watched_values[address] + except KeyError: + return None diff --git a/sardine/handlers/osc_loop.py b/sardine/handlers/osc_loop.py new file mode 100644 index 00000000..fb5bb919 --- /dev/null +++ b/sardine/handlers/osc_loop.py @@ -0,0 +1,19 @@ +from osc4py3.as_eventloop import osc_process, osc_startup, osc_terminate + +from ..base import BaseRunnerHandler, BaseThreadedLoopMixin + +__all__ = ("OSCLoop",) + + +class OSCLoop(BaseThreadedLoopMixin, BaseRunnerHandler): + def __init__(self, *, loop_interval: float = 0.001): + super().__init__(loop_interval=loop_interval) + + def before_loop(self): + osc_startup() + + def loop(self): + osc_process() + + def after_loop(self): + osc_terminate() diff --git a/sardine/handlers/player.py b/sardine/handlers/player.py new file mode 100644 index 00000000..ede76917 --- /dev/null +++ b/sardine/handlers/player.py @@ -0,0 +1,174 @@ +from dataclasses import dataclass +from typing import Any, Callable, Optional, ParamSpec, TypeVar + +from ..base import BaseHandler +from ..handlers.sender import Number, 
NumericElement, Sender +from ..scheduler import AsyncRunner +from ..utils import alias_param, get_snap_deadline + +__all__ = ("Player",) + +P = ParamSpec("P") +T = TypeVar("T") + + +@dataclass +class PatternInformation: + sender: Sender + send_method: Callable[P, T] + args: tuple[Any] + kwargs: dict[str, Any] + period: NumericElement + iterator: Number + divisor: NumericElement + rate: NumericElement + snap: Number + timespan: Optional[float] + + +class Player(BaseHandler): + + """ + Players are holders used to support one-line specialised swimming functions. Many + instances of 'Player' are injected in globals() at boot time as a way to provide a + quick interface for the user to output musical and data patterns. Players are han- + dling the whole lifetime of a pattern, from its initial entry in the scheduler to + its death when the silence() or panic() method is called. + """ + + def __init__(self, name: str): + super().__init__() + self._name = name + self.runner = AsyncRunner(name=name) + self._iteration_span: int = 1 + self._period: int | float = 1.0 + + @property + def iterator(self) -> int: + """Internal iterator stored by the Player instance""" + return self._iteration_span + + @iterator.setter + def iterator(self, value: int) -> None: + """Internal iterator stored by the Player instance""" + self._iteration_span = value + + def fit_period_to_timespan(self, period: NumericElement, timespan: float): + """ + Fit a given period to a certain timestamp (forcing a pattern to have a fixed + duration. This feature can be useful for preventing users from creating loops + that will phase out too easily. 
+ """ + + def _remap(x, in_min, in_max, out_min, out_max): + """Remap a value v from range (x, y) to range (x', y')""" + return (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min + + if isinstance(period, (int, float)): + return _remap(period, 0, period, 0, timespan) + + period = self.env.parser.parse(period) + period = list(map(lambda x: _remap(x, 0, sum(period), 0, timespan), period)) + return period + + @staticmethod + @alias_param(name="period", alias="p") + @alias_param(name="iterator", alias="i") + @alias_param(name="divisor", alias="d") + @alias_param(name="rate", alias="r") + @alias_param(name="timespan", alias="span") + def play( + sender: Sender, + send_method: Callable[P, T], + *args: P.args, + timespan: Optional[float] = None, + period: NumericElement = 1, + iterator: Number = 0, + divisor: NumericElement = 1, + rate: NumericElement = 1, + snap: Number = 0, + **kwargs: P.kwargs, + ): + """Entry point of a pattern into the Player""" + + return PatternInformation( + sender, + send_method, + args, + kwargs, + period, + iterator, + divisor, + rate, + snap, + timespan, + ) + + def __rshift__(self, pattern: Optional[PatternInformation]) -> None: + """ + This method acts as a cosmetic disguise for feeding PatternInformation into a + given player. Its syntax is inspired by FoxDot (Ryan Kirkbride), another very + popular live coding library. 
+ """ + if pattern is not None and pattern.timespan is not None: + pattern.period = self.fit_period_to_timespan( + pattern.period, pattern.timespan + ) + self.push(pattern) + + def get_new_period(self, pattern: PatternInformation) -> Number: + """Get period value for the current cycle""" + for message in pattern.sender.pattern_reduce( + {"period": pattern.period}, + pattern.iterator, + pattern.divisor, + pattern.rate, + use_divisor_to_skip=False, + ): + return message["period"] + return 1 + + def func( + self, + pattern: PatternInformation, + p: NumericElement = 1, # pylint: disable=invalid-name,unused-argument + ) -> None: + """Central swimming function defined by the player""" + + pattern.send_method( + *pattern.args, + **pattern.kwargs, + iterator=pattern.iterator, + divisor=pattern.divisor, + rate=pattern.rate, + ) + + pattern.iterator += self._iteration_span + period = self.get_new_period(pattern) + self.again(pattern=pattern, p=period) + + def push(self, pattern: Optional[PatternInformation]): + """ + Managing lifetime of the pattern, similar to managing a swimming function + manually. If PatternInformation is hot-swapped by None, the Player will stop + scheduling its internal function, defined in self.func. + """ + # This is a local equivalent to the silence() function. 
+ if pattern is None: + return self.env.scheduler.stop_runner(self.runner) + + # Forcibly reset the interval shift back to 0 to make sure + # the new pattern can be synchronized + self.runner.interval_shift = 0.0 + + period = self.get_new_period(pattern) + + deadline = get_snap_deadline(self.env.clock, pattern.snap) + self.runner.push_deferred(deadline, self.func, pattern=pattern, p=period) + + self.env.scheduler.start_runner(self.runner) + self.runner.reload() + + def again(self, *args, **kwargs): + self.runner.update_state(*args, **kwargs) + self.runner.swim() diff --git a/sardine/handlers/sender.py b/sardine/handlers/sender.py new file mode 100644 index 00000000..8d0acdc1 --- /dev/null +++ b/sardine/handlers/sender.py @@ -0,0 +1,141 @@ +from math import floor +from typing import Generator, TypeVar, Union + +from ..base import BaseHandler + +__all__ = ("Sender",) + +Number = Union[float, int] +ReducedElement = TypeVar("ReducedElement") +RecursiveElement = Union[ReducedElement, list] # assume list is list[RecursiveElement] +ParsableElement = Union[RecursiveElement, str] + +# Sub-types of ParsableElement +NumericElement = Union[Number, list, str] +StringElement = Union[str, list] # assume list is list[StringElement] + +Pattern = dict[str, list[ParsableElement]] +ReducedPattern = dict[str, ReducedElement] + + +def _maybe_index(val: RecursiveElement, i: int) -> RecursiveElement: + if not isinstance(val, list): + return val + + length = len(val) + return val[i % length] + + +def _maybe_length(val: RecursiveElement) -> int: + if isinstance(val, list): + return len(val) + return 0 + + +class Sender(BaseHandler): + + """ + Handlers can inherit from 'Sender' if they are in charge of some output operation. + Output operations in Sardine generally involve some amount of pattern parsing and + monophonic/polyphonic message composition. This class implements most of the inter- + nal behavior necessary for patterning. 
Each handler rely on these methods in the + final 'send' method called by the user. + + pattern_element: return the right index number for the pattern. + reduce_polyphonic_message: turn any dict pattern into a list of patterns. + pattern_reduce: reduce a pattern to a dictionary of values corresponding to iterator + index. + """ + + @staticmethod + def pattern_element( + val: RecursiveElement, + iterator: Number, + divisor: Number, + rate: Number, + ) -> RecursiveElement: + """Joseph Enguehard's algorithm for solving iteration speed""" + # For simplicity, we're allowing non-sequences to be passed through + if not isinstance(val, list): + return val + + length = len(val) + if length > 0: + i = floor(iterator * rate / divisor) % length + return val[i] + raise ValueError(f"Cannot pattern an empty sequence: {val!r}") + + def pattern_reduce( + self, + pattern: Pattern, + iterator: Number, + divisor: NumericElement = 1, + rate: NumericElement = 1, + *, + use_divisor_to_skip: bool = True, + ) -> Generator[ReducedPattern, None, None]: + """Reduces a pattern to an iterator yielding subpatterns. + + First, any string values are parsed using the fish bowl's parser. + Afterwards, if the pattern is a dictionary where none of its values + are lists, the pattern is wrapped in a list and returned, ignoring + the iterator/divisor/rate parameters. For example:: + + >>> pat = {"note": 60, "velocity": 100} + >>> list(sender.pattern_reduce(pat, 0, 1, 1)) + [{'note': 60, 'velocity': 100}] + + If it is a monophonic pattern, i.e. a dictionary where one or more + of its values are lists, the corresponding element of those lists + are indexed using the `pattern_element()` method which implements + Joseph Enguehard's algorithm:: + + >>> pat = {"note": [60, 70, 80, 90], "velocity": 100} + >>> for i in range(1, 4): + ... 
list(sender.pattern_reduce(pat, i, 2, 3)) + [{'note': 70, 'velocity': 100}] + [{'note': 90, 'velocity': 100}] + [{'note': 60, 'velocity': 100}] + + If it is a polyphonic pattern, i.e. a dictionary where one or more + of the values indexed by the above algorithm are also lists, the + elements of each list are paired together into several reduced patterns. + The number of messages is determined by the length of the longest list. + Any lists that are shorter than the longest list will repeat its + elements from the start to match the length of the longest list. + Any values that are not lists are simply repeated. + + When `use_divisor_to_skip` is True and the `divisor` is a number + other than 1, patterns are only generated if the iterator is + divisible by the divisor, and will otherwise yield zero messages. + """ + # TODO: more examples for pattern_reduce() + # TODO: document pattern_reduce() arguments + def maybe_parse(val: ParsableElement) -> RecursiveElement: + if isinstance(val, str): + return self.env.parser.parse(val) + return val + + if any(isinstance(n, (list, str)) for n in (divisor, rate)): + divisor, rate = next( + self.pattern_reduce({"divisor": divisor, "rate": rate}, iterator) + ).values() + + if use_divisor_to_skip and iterator % divisor != 0: + return + + pattern = {k: maybe_parse(v) for k, v in pattern.items()} + + for k, v in pattern.items(): + pattern[k] = self.pattern_element(v, iterator, divisor, rate) + + if not any(isinstance(v, list) for v in pattern.values()): + # Base case where we have a monophonic message + yield pattern + + # For polyphonic messages, recursively reduce them + # to a list of monophonic messages + max_length = max(_maybe_length(v) for v in pattern.values()) + for i in range(max_length): + sub_pattern = {k: _maybe_index(v, i) for k, v in pattern.items()} + yield from self.pattern_reduce(sub_pattern, iterator, divisor, rate) diff --git a/sardine/handlers/sleep_handler/__init__.py b/sardine/handlers/sleep_handler/__init__.py 
new file mode 100644 index 00000000..af627797 --- /dev/null +++ b/sardine/handlers/sleep_handler/__init__.py @@ -0,0 +1,182 @@ +import asyncio +import heapq +from collections import deque +from typing import Optional, Union + +from exceptiongroup import BaseExceptionGroup + +from ...base import BaseHandler +from .time_handle import * + +__all__ = ("SleepHandler", "TimeHandle") + +NUMBER = Union[float, int] + + +class SleepHandler(BaseHandler): + """The primary interface for other components to sleep. + + Args: + delta_record_size (int): + The maximum number of recordings to store when averaging the + delta for anti-drift. Set to 0 to disable drift correction. + WARNING: this is an experimental setting and may severely degrade + sleep accuracy when enabled. + poll_interval (float): + The polling interval to use when the current clock does not + support its own method of sleep. + """ + + def __init__( + self, + delta_record_size: int = 0, + poll_interval: float = 0.001, + ): + super().__init__() + + self.poll_interval = poll_interval + + self._poll_task: Optional[asyncio.Task] = None + self._interrupt_event = asyncio.Event() + self._wake_event = asyncio.Event() + self._time_handles: list[TimeHandle] = [] + self._previous_deltas: deque[float] = deque(maxlen=delta_record_size) + + def __repr__(self) -> str: + return f"<{type(self).__name__} interval={self.poll_interval}>" + + # Public methods + + async def sleep(self, duration: NUMBER): + """Sleeps for the specified duration.""" + deadline = self.env.clock.time + duration + return await self.sleep_until(deadline) + + async def sleep_until(self, deadline: NUMBER): + """Sleeps until the given time has been reached. + + The deadline is based on the fish bowl clock's time. 
+ """ + if self.env is None: + raise ValueError("SleepHandler must be added to a fish bowl") + elif not self.env.is_running(): + raise RuntimeError("cannot use sleep until fish bowl has started") + + clock = self.env.clock + + while True: + # Handle stop/pauses before proceeding + if self._is_terminated(): + asyncio.current_task().cancel() + await self._wake_event.wait() + + corrected_deadline = deadline - self._get_avg_delta() + + # Use clock sleep if available, else polling implementation + if clock.can_sleep(): + sleep_task = asyncio.create_task( + clock.sleep(corrected_deadline - clock.time) + ) + else: + sleep_task = asyncio.create_task(self._sleep_until(corrected_deadline)) + + # Wait until sleep completes or interruption + intrp_task = asyncio.create_task(self._interrupt_event.wait()) + tasks = (sleep_task, intrp_task) + + done, pending = await asyncio.wait( + tasks, return_when=asyncio.FIRST_COMPLETED + ) + delta = clock.time - corrected_deadline + + for t in pending: + t.cancel() + + exceptions = (t.exception() for t in done) + exceptions = [exc for exc in exceptions if exc is not None] + if exceptions: + raise BaseExceptionGroup( + f"Error occurred while sleeping until {deadline = }", exceptions + ) + + if sleep_task in done: + self._previous_deltas.append(delta) + return + + # Internal methods + + def _get_avg_delta(self) -> float: + if self._previous_deltas: + return sum(self._previous_deltas) / len(self._previous_deltas) + return 0.0 + + def _check_running(self): + if self._time_handles and not self._is_polling(): + self._poll_task = asyncio.create_task(self._run_poll()) + elif not self._time_handles and self._is_polling(): + self._poll_task.cancel() + + def _create_handle(self, deadline: NUMBER) -> TimeHandle: + handle = TimeHandle(deadline) + + if self.env.clock.time >= deadline: + handle.fut.set_result(None) + else: + heapq.heappush(self._time_handles, handle) + self._check_running() + + return handle + + def _is_terminated(self) -> bool: + # This 
might be called after teardown, in which case `env` is None + return self.env is None or not self.env.is_running() + + def _is_polling(self) -> bool: + return self._poll_task is not None and not self._poll_task.done() + + async def _run_poll(self): + """Continuously polls the clock's time until all TimeHandles resolve. + + TimeHandles will resolve when their deadline is reached, + or they are cancelled. + + Note that when a pause/stop occurs, all `sleep_until()` calls + cancel the `_sleep_until()` task, which should indirectly + cancel the handle being awaited on. + """ + # this is implemented very similarly to asyncio.BaseEventLoop + while self._time_handles: + while self._time_handles: + handle = self._time_handles[0] + if handle.cancelled(): + heapq.heappop(self._time_handles) + elif self.env.clock.time >= handle.when: + handle.fut.set_result(None) + heapq.heappop(self._time_handles) + else: + # all handles afterwards are either still waiting or cancelled + break + await asyncio.sleep(self.poll_interval) + + async def _sleep_until(self, deadline: NUMBER): + await self._create_handle(deadline) + + # Handler hooks + + def setup(self): + for event in ("start", "pause", "resume", "stop"): + self.register(event) + + def teardown(self): + self._interrupt_event.set() + self._wake_event.set() # just in case + + def hook(self, event: str, *args): + if event in ("start", "resume"): + self._wake_event.set() + self._interrupt_event.clear() + if event == "pause": + self._interrupt_event.set() + self._wake_event.clear() + elif event == "stop": + self.teardown() diff --git a/sardine/handlers/sleep_handler/time_handle.py b/sardine/handlers/sleep_handler/time_handle.py new file mode 100644 index 00000000..a917e390 --- /dev/null +++ b/sardine/handlers/sleep_handler/time_handle.py @@ -0,0 +1,55 @@ +import asyncio +import functools + +__all__ = ("TimeHandle",) + + +@functools.total_ordering +class TimeHandle: + """A handle that can wait for a specified time on the fish bowl's 
clock.""" + + __slots__ = ("when", "fut") + + def __init__(self, deadline: int): + self.when = deadline + self.fut = asyncio.get_running_loop().create_future() + + def __repr__(self): + status = ( + "pending" + if not self.fut.done() + else "done" + if not self.fut.cancelled() + else "cancelled" + ) + + return "<{} {} when={}>".format( + type(self).__name__, + status, + self.when, + ) + + def __eq__(self, other): + if not isinstance(other, TimeHandle): + return NotImplemented + return self.when == other.when and self.fut == other.fut + + def __hash__(self): + return hash((self.when, self.fut)) + + def __lt__(self, other): + if not isinstance(other, TimeHandle): + return NotImplemented + return self.when < other.when + + def __await__(self): + return self.fut.__await__() + + def cancel(self) -> bool: + return self.fut.cancel() + + def cancelled(self) -> bool: + return self.fut.cancelled() + + def done(self) -> bool: + return self.fut.done() diff --git a/sardine/handlers/superdirt.py b/sardine/handlers/superdirt.py new file mode 100644 index 00000000..a2364aff --- /dev/null +++ b/sardine/handlers/superdirt.py @@ -0,0 +1,104 @@ +import time +from itertools import chain +from typing import Optional + +from osc4py3 import oscbuildparse +from osc4py3.as_eventloop import osc_send, osc_udp_client + +from ..utils import alias_param +from .osc_loop import OSCLoop +from .sender import Number, NumericElement, ParsableElement, Sender, StringElement + +__all__ = ("SuperDirtHandler",) + + +class SuperDirtHandler(Sender): + def __init__( + self, + *, + loop: OSCLoop, + name: str = "SuperDirt", + ahead_amount: float = 0.3, + ): + super().__init__() + self._name = name + self.loop = loop + loop.add_child(self) + + # Opening a new OSC Client to talk with it + self._osc_client = osc_udp_client( + address="127.0.0.1", port=57120, name=self._name + ) + self._ahead_amount = ahead_amount + + # Setting up environment + self._events = { + "dirt_play": self._dirt_play, + "panic": 
self._dirt_panic, + } + + def __repr__(self) -> str: + return f"" + + def setup(self): + for event in self._events: + self.register(event) + + def hook(self, event: str, *args): + func = self._events[event] + func(*args) + + def __send(self, address: str, message: list) -> None: + msg = oscbuildparse.OSCMessage(address, None, message) + bun = oscbuildparse.OSCBundle( + oscbuildparse.unixtime2timetag(time.time() + self._ahead_amount), + [msg], + ) + osc_send(bun, self._name) + + def __send_timed_message(self, address: str, message: list): + """Build and send OSC bundles""" + msg = oscbuildparse.OSCMessage(address, None, message) + bun = oscbuildparse.OSCBundle( + oscbuildparse.unixtime2timetag(time.time() + self._ahead_amount), + [msg], + ) + osc_send(bun, self._name) + + def _send(self, address, message): + self.__send(address=address, message=message) + + def _dirt_play(self, message: list): + self.__send_timed_message(address="/dirt/play", message=message) + + def _dirt_panic(self): + self._dirt_play(message=["sound", "superpanic"]) + + @alias_param(name="iterator", alias="i") + @alias_param(name="divisor", alias="d") + @alias_param(name="rate", alias="r") + def send( + self, + sound: Optional[StringElement], + orbit: NumericElement = 0, + iterator: Number = 0, + divisor: NumericElement = 1, + rate: NumericElement = 1, + **pattern: ParsableElement, + ): + + if sound is None: + return + + pattern["sound"] = sound + pattern["orbit"] = orbit + pattern["cps"] = round(self.env.clock.phase, 4) + pattern["cycle"] = ( + self.env.clock.bar * self.env.clock.beats_per_bar + ) + self.env.clock.beat + + for message in self.pattern_reduce(pattern, iterator, divisor, rate): + if message["sound"] is None: + continue + serialized = list(chain(*sorted(message.items()))) + self._dirt_play(serialized) diff --git a/sardine/io/MIDISender.py b/sardine/io/MIDISender.py deleted file mode 100644 index 85d479bd..00000000 --- a/sardine/io/MIDISender.py +++ /dev/null @@ -1,267 +0,0 @@ 
-#!/usr/bin/env python3 -import asyncio -from email import parser -import pprint -import functools -from typing import TYPE_CHECKING, Union, Optional -from ..sequences import ListParser -from math import floor -from .SenderLogic import pattern_element, compose_parametric_patterns -from ..sequences.LexerParser.Chords import Chord - -if TYPE_CHECKING: - from ..clock import Clock - from ..io import MIDIIo - - -class MIDISender: - def __init__( - self, - clock: "Clock", - midi_client: Optional["MIDIIo"] = None, - note: Union[int, float, str, Chord] = 60, - delay: Union[int, float, str, Chord] = 0.1, - velocity: Union[int, float, str, Chord] = 120, - channel: Union[int, float, str, Chord] = 0, - trig: Union[int, float, str] = 1, - at: Union[float, int] = 0, - nudge: Union[int, float] = 0.0, - ): - - self.clock = clock - self._number_parser, self._note_parser = (self.clock.parser, self.clock.parser) - self.content = {} - - if midi_client is None: - self.midi_client = self.clock._midi - else: - self.midi_client = midi_client - - self.trig = self.parse_initial_arguments(trig) - self.delay = self.parse_initial_arguments(delay) - self.velocity = self.parse_initial_arguments(velocity) - self.channel = self.parse_initial_arguments(channel) - self.note = self.parse_note(note) - self.after: int = at - self._nudge: Union[int, float] = nudge - - def parse_initial_arguments(self, argument): - """Parse arguments at __init__ time""" - if isinstance(argument, str): - return self._parse_number_pattern(argument) - else: - return argument - - def parse_note(self, argument): - """Parse arguments at __init__ time""" - if isinstance(argument, str): - return self._note_parser.parse(argument) - else: - return argument - - def _parse_number_pattern(self, pattern: str): - """Pre-parse MIDI params during __init__""" - return self._number_parser.parse(pattern) - - def __str__(self): - """String representation of a sender content""" - pat = { - "note": int(self.note), - "delay": self.delay, - 
"velocity": int(self.velocity), - "channel": int(self.channel), - } - return f"{self.midi_client}: {pprint.pformat(pat)}" - - # ------------------------------------------------------------------------ - # GENERIC Mapper: make parameters chainable! - - def schedule(self, message): - """ - Higher logic of the schedule function. Is able to send both monophonic - and polyphonic messages - """ - # Analyse message to find chords lying around - def chords_in_message(message: list) -> bool: - return any(isinstance(x, Chord) for x in message) - - def longest_list_in_message(message: list) -> int: - return max(len(x) if isinstance(x, (Chord, list)) else 1 for x in message) - - def clamp_everything_to_midi_range(message: list) -> list: - """Clamp every value to MIDI Range (0-127)""" - - def _clamp(n, smallest, largest): - return max(smallest, min(n, largest)) - - new_list = [] - for _ in message: - if isinstance(_, str): - new_list.append(_) - elif isinstance(_, (float, int)): - new_list.append(_clamp(_, 0, 127)) - elif isinstance(_, Chord): - new_list.append(_._clamp()) - else: - new_list.append(_) - return new_list - - # Clamping values for safety - message = clamp_everything_to_midi_range(message) - - if chords_in_message(message): - # We need to compose len(longest_list) messages - longest_list = longest_list_in_message(message) - list_of_messages = [] - for _ in range(0, longest_list): - note_message = [ - x if not isinstance(x, Chord) else x[_] for x in message - ] - list_of_messages.append(note_message) - for message in list_of_messages: - self._schedule(dict(zip(message[::2], message[1::2]))) - else: - self._schedule(dict(zip(message[::2], message[1::2]))) - - def _schedule(self, message): - async def _waiter(): - await asyncio.sleep(self._nudge) - await handle - asyncio.create_task( - self.midi_client.note( - delay=message.get("delay"), - note=int(message.get("sound")), - velocity=int(message.get("velocity")), - channel=int(message.get("channel")), - ) - ) - - ticks 
= self.clock.get_beat_ticks(self.after, sync=True) - # Beat synchronization is disabled since `self.after` - # is meant to offset us from the current time - handle = self.clock.wait_after(n_ticks=ticks) - asyncio.create_task(_waiter(), name="midi-scheduler") - - def out(self, i: int = 0, div: int = 1, rate: int = 1) -> None: - """ - Prototype for the Sender output. - """ - if i % div != 0: - return - i = int(i) - final_message = [] - - # Mimicking the SuperDirtSender behavior - self.sound = self.note - self.content |= { - "delay": self.delay, - "velocity": self.velocity, - "channel": self.channel, - "trig": self.trig, - } - - def _message_without_iterator(): - """Compose a message if no iterator is given""" - composite_tokens = (list, Chord) - single_tokens = (type(None), str, int, float) - - # ================================================================= - # HANDLING THE SOUND PARAMETER - # ================================================================= - - if self.sound == []: - return - - # Handling lists and chords - if isinstance(self.sound, composite_tokens): - first_element = self.sound[0] - if first_element is not None: - final_message.extend(["sound", self.sound[0]]) - else: - return - # Handling other representations (str, None) - elif isinstance(self.sound, single_tokens): - if self.sound is None: - return - else: - final_message.extend(["sound", self.sound]) - - # ================================================================= - # HANDLING OTHER PARAMETERS - # ================================================================= - - # Handling other non-essential keys - for key, value in self.content.items(): - # We don't care if there is no value, just drop it - if value == []: - continue - if isinstance(value, composite_tokens): - value = value[0] - final_message.extend([key, value]) - - # ================================================================= - # TRIGGER MANAGEMENT - # ================================================================= - 
- if "trig" not in final_message: - final_message.extend(["trig", 1]) - - trig_value = final_message[final_message.index("trig") + 1] - if trig_value: - return self.schedule(final_message) - - def _message_with_iterator(): - """Compose a message if an iterator is given""" - composite_tokens = (list, Chord) - single_tokens = (type(None), str, float, int) - - # ================================================================= - # HANDLING THE SOUND PARAMETER - # ================================================================= - if self.sound == []: - return - if isinstance(self.sound, composite_tokens): - new_element = self.sound[ - pattern_element(iterator=i, div=div, rate=rate, pattern=self.sound) - ] - if new_element is None: - return - else: - final_message.extend(["sound", new_element]) - elif isinstance(self.sound, single_tokens): - if self.sound is None: - return - else: - final_message.extend(["sound", self.sound]) - else: - if self.sound is None: - return - else: - final_message.extend(["sound", self.sound]) - - # ================================================================= - # HANDLING OTHER PARAMETERS - # ================================================================= - - pattern_result = compose_parametric_patterns( - div=div, rate=rate, iterator=i, items=self.content.items() - ) - final_message.extend(pattern_result) - - # ================================================================= - # TRIGGER MANAGEMENT - # ================================================================= - - # Trig must always be included - if "trig" not in final_message: - final_message.extend(["trig", 1]) - - trig_value = final_message[final_message.index("trig") + 1] - if trig_value: - return self.schedule(final_message) - - # Ultimately composing and sending message - if i == 0: - return _message_without_iterator() - else: - return _message_with_iterator() diff --git a/sardine/io/MarkdownReader.py b/sardine/io/MarkdownReader.py deleted file mode 100644 index 
73dbf8d1..00000000 --- a/sardine/io/MarkdownReader.py +++ /dev/null @@ -1,23 +0,0 @@ -from rich.markdown import Markdown -from rich.console import Console -import platform -import pathlib -from webbrowser import open as web_open - -__all__ = ("open_sardinopedia", "print_sardinopedia") - -if platform.system() in ["Linux", "Darwin"]: - SARDINOPEDIA_PATH = pathlib.Path(__file__).parents[2] / "docs/sardinopedia.md" -else: - SARDINOPEDIA_PATH = pathlib.Path(__file__).parents[2] / "docs\\sardinopedia.md" - - -def print_sardinopedia(): - with open(SARDINOPEDIA_PATH, "r") as sardinopedia: - console = Console() - console.print(Markdown(sardinopedia.read())) - - -def open_sardinopedia(): - """Open the Sardinopedia file in external app""" - web_open("file://" + str(SARDINOPEDIA_PATH)) diff --git a/sardine/io/MidiIo.py b/sardine/io/MidiIo.py deleted file mode 100644 index 81f5780a..00000000 --- a/sardine/io/MidiIo.py +++ /dev/null @@ -1,196 +0,0 @@ -from typing import Union, TYPE_CHECKING -from rich import print as rich_print -from rich.console import Console -import threading -import asyncio -import mido -import sys - -if TYPE_CHECKING: - from ..clock import Clock - -__all__ = ("MIDIIo",) - - -class MidiNoteEvent: - def __init__(self, event, due): - self.event = event - self.due = due - - def __repr__(self) -> str: - return f"Due: {self.due}, Event: {self.event}" - - -class MIDIIo(threading.Thread): - """ - Direct MIDI I/O Using Mido. MIDI is also available indirectly - through SuperDirt. I need to do something to address the redun- - dancy. - """ - - def __init__( - self, - clock: "Clock", - port_name: Union[str, None] = None, - at: Union[float, int] = 0, - ): - """Open a MIDI Output Port. A name can be given, corresponding to - the name of a valid currently opened MIDI port on the given system. - If the name is invalid or if the port couldn't be found, the user - will be faced with a prompt allowing him to select one of the currently - detected ports. 
- - Alternatively, if port_name is configured as "Sardine" in the config, - a new virtual port will spawn, named Sardine. - """ - - threading.Thread.__init__(self) - - self._midi_ports = mido.get_output_names() - self.port_name = port_name - self.clock = clock - self.after: int = at - self._midi = None - self._events = {} - - # For MacOS/Linux - if sys.platform not in "win32": - if self.port_name in ["Sardine", "internal"]: - self._midi = mido.open_output("Sardine", virtual=True) - elif self.port_name: - self.try_opening_midi_port(name=port_name) - else: - self._midi = mido.open_output("Sardine", virtual=True) - # For W10/W11 - else: - try: - self.try_opening_midi_port(name=port_name) - except Exception as err: - print(f"[red]Failed to open a MIDI Connexion: {err}") - - def try_opening_midi_port(self, name: str): - """ - Try to open a MIDI Port. Fallback to _choose_midi_port - (MIDI Port picker) if provided port name is invalid. - """ - try: - self._midi = mido.open_output(name) - except Exception as error: - rich_print(f"[bold red]Init error: {error}[/bold red]") - self._midi = mido.open_output(self._choose_midi_port()) - - @staticmethod - def _choose_midi_port() -> str: - """ASCII MIDI Port chooser""" - ports = mido.get_output_names() - console = Console() - for (i, item) in enumerate(ports, start=1): - rich_print(f"[color({i})] {item} [{i}]") - rich_print( - "[red]Note: you don't have to hand pick your MIDI Port manually every time." 
- ) - rich_print("[red]Check sardine-config to enter a permanent default MIDI port.") - nb = console.input("[bold yellow] Choose a MIDI Port: [/bold yellow]") - try: - nb = int(nb) - 1 - rich_print(f"[yellow]You picked[/yellow] [green]{ports[nb]}[/green].") - return ports[nb] - except Exception: - rich_print(f"Input can only take valid number in range, not {nb}.") - sys.exit() - - def _process_events(self): - """MIDI Events to be processed every tick by the clock""" - to_remove = [] - for key, item in list(self._events.items()): - item.due -= 1 - if item.due <= 0: - self.schedule(item.event) - to_remove.append(key) - for e in to_remove: - del self._events[e] - - def send(self, message: mido.Message) -> None: - self._midi.send(message) - - async def send_async(self, message: mido.Message) -> None: - self._midi.send(message) - - def send_stop(self) -> None: - """MIDI Start message""" - self._midi.send(mido.Message("stop")) - - def send_reset(self) -> None: - """MIDI Reset message""" - self._midi.send(mido.Message("reset")) - - def send_clock(self) -> None: - """MIDI Clock Message""" - self._midi.send(mido.Message("clock")) - - async def send_start(self, initial: bool = False) -> None: - """MIDI Start message""" - self._midi.send(mido.Message("start")) - - def schedule(self, message, delay: Union[int, float, None] = None): - async def _waiter(): - await handle - if delay is not None: - await asyncio.sleep(delay) - self.send(message) - - ticks = self.clock.get_beat_ticks(self.after, sync=False) - handle = self.clock.wait_after(n_ticks=ticks) - asyncio.create_task(_waiter(), name="midi-scheduler") - - async def note( - self, - delay: Union[int, float], - note: int = 60, - velocity: int = 127, - channel: int = 1, - ) -> None: - """Send a MIDI Note through principal MIDI output""" - note_id = f"{note}{channel}" - self._events[note_id + "on"] = MidiNoteEvent( - due=0, - event=mido.Message( - "note_on", note=note, channel=channel, velocity=velocity - ), - ) - if note_id + 
"off" in self._events.keys(): - self.schedule( - mido.Message("note_off", note=note, channel=channel, velocity=0), 0.0 - ) - self._events[note_id + "off"] = MidiNoteEvent( - due=delay, - event=mido.Message("note_off", note=note, channel=channel, velocity=0), - ) - else: - self._events[note_id + "off"] = MidiNoteEvent( - due=delay, - event=mido.Message("note_off", note=note, channel=channel, velocity=0), - ) - - async def control_change(self, channel, control, value) -> None: - """Control Change message""" - self.schedule( - mido.Message( - "control_change", - channel=int(channel), - control=int(control), - value=int(value), - ) - ) - - async def program_change(self, channel, program) -> None: - """Program change message""" - self.schedule(mido.Message("program_change", program=program, channel=channel)) - - async def pitchwheel(self, channel, pitch) -> None: - """Program change message""" - self.schedule(mido.Message("pitchweel", pitch=pitch, channel=channel)) - - async def sysex(self, data: list[int]) -> None: - """Custom User Sysex message""" - self.schedule(mido.Message("sysex", data=bytearray(data), time=0)) diff --git a/sardine/io/OSCSender.py b/sardine/io/OSCSender.py deleted file mode 100644 index 5f0e47bd..00000000 --- a/sardine/io/OSCSender.py +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env python3 -import asyncio -import pprint -import functools -from typing import TYPE_CHECKING, Union -from ..sequences import ListParser -from math import floor -from .SenderLogic import pattern_element, compose_parametric_patterns - -if TYPE_CHECKING: - from ..clock import Clock - - -class OSCSender: - def __init__( - self, - clock: "Clock", - osc_client, - address: str, - at: Union[float, int] = 0, - **kwargs, - ): - - self.clock = clock - self._number_parser, self._name_parser = (self.clock.parser, self.clock.parser) - self.osc_client = osc_client - self.address = self._name_parser.parse(address) - - self.content = {} - for key, value in kwargs.items(): - if 
isinstance(value, (int, float)): - self.content[key] = value - else: - self.content[key] = self._number_parser.parse(value) - self.after: int = at - - # Iterating over kwargs. If parameter seems to refer to a - # method (usually dynamic SuperDirt parameters), call it - for k, v in kwargs.items(): - method = getattr(self, k, None) - if callable(method): - method(v) - else: - self.content[k] = v - - def __str__(self): - """String representation of a sender content""" - param_dict = pprint.pformat(self.content) - return f"{self.address}: {param_dict}" - - # ------------------------------------------------------------------------ - # GENERIC Mapper: make parameters chainable! - - def __getattr__(self, name: str): - method = functools.partial(self.addOrChange, name=name) - method.__doc__ = f"Updates the sound's {name} parameter." - return method - - def addOrChange(self, values, name: str): - """Will set a parameter or change it if already in message""" - - # Detect if a given parameter is a pattern, form a valid pattern - if isinstance(values, (str)): - self.content |= {name: self._number_parser.parse(values)} - return self - - def schedule(self, message: dict): - async def _waiter(): - await handle - self.osc_client.send(self.clock, message["address"], message["message"]) - - ticks = self.clock.get_beat_ticks(self.after, sync=False) - # Beat synchronization is disabled since `self.after` - # is meant to offset us from the current time - handle = self.clock.wait_after(n_ticks=ticks) - asyncio.create_task(_waiter(), name="osc-scheduler") - - def out(self, i: int = 0, div: int = 1, rate: int = 1) -> None: - """Sender method""" - - if i % div != 0: - return - - final_message = {} - - i = int(i) - - def convert_list_to_dict(lst): - res_dct = {lst[i]: lst[i + 1] for i in range(0, len(lst), 2)} - return res_dct - - def _message_without_iterator(): - """Compose a message if no iterator is given. This will simply - return the first value from every pattern, except for silence. 
- Silence will return nothing.""" - - # This is the case where nothing is returned - if self.address == [] or self.address[0] is None: - return - if isinstance(self.address, (list, str)): - given_address = self.address[0] - if given_address is None: - return - else: - final_message["address"] = "/" + given_address - - # We now have the address and we move to the content of the message - # We will store the final message inside a list - final_message["message"] = [] - - # Browsing items and figuring out what the first value is - for key, value in self.content.items(): - if value == []: - continue - if isinstance(value, list): - given_value = value[0] - if given_value is None: - return - else: - if key != "trig": - final_message["message"].append(value) - - if "trig" not in self.content.keys(): - trig = 1 - else: - trig = int(self.content["trig"][0]) - if trig: - return self.schedule(final_message) - - def _message_with_iterator(): - """Compose a message if an iterator is given""" - - if "trig" not in self.content.keys(): - self.content["trig"] = 1 - - # We need to determine the address, given that - # the address can also be a silence. - if self.address == []: - return - if isinstance(self.address, list): - new_element = self.address[ - pattern_element( - iterator=i, div=div, rate=rate, pattern=self.address - ) - ] - if new_element is None: - return - else: - final_message["address"] = "/" + new_element - else: - final_message["address"] = "/" + self.address - - # Now that we have it, we will iterate over pattern arguments to - # form the message, just like in the non-iterated version - - final_message["message"] = [] - pattern_result = compose_parametric_patterns( - div=div, rate=rate, iterator=i, items=self.content.items() - ) - final_message["message"].extend(pattern_result) - - # Now we have to an enormous operation just to check on trig... 
- if isinstance(self.content["trig"], list): - trig = self.content["trig"][ - pattern_element( - iterator=i, div=div, rate=rate, pattern=self.content["trig"] - ) - ] - if trig is None: - for decreasing_index in range(i, -1, -1): - trig = self.content["trig"][ - pattern_element( - iterator=decreasing_index, - div=div, - rate=rate, - pattern=self.content["trig"], - ) - ] - if trig is None: - continue - else: - trig = int(trig) - break - if trig is None: - raise ValueError("Pattern does not contain any value") - else: - trig = int(trig) - elif isinstance(self.content["trig"], (int, float, str)): - trig = int(self.content["trig"]) - - if trig: - return self.schedule(final_message) - - # Ultimately composing and sending message - if i is None: - return _message_without_iterator() - else: - return _message_with_iterator() diff --git a/sardine/io/Osc.py b/sardine/io/Osc.py deleted file mode 100644 index 0f9cf68a..00000000 --- a/sardine/io/Osc.py +++ /dev/null @@ -1,209 +0,0 @@ -#!/usr/bin/env python3 -import asyncio -from typing import Callable, Any -from time import time -from typing import Union, TYPE_CHECKING, List -from osc4py3 import oscbuildparse -from functools import partial -from osc4py3.as_eventloop import ( - osc_startup, - osc_udp_client, - osc_udp_server, - osc_method, - osc_send, - osc_process, - osc_terminate, -) -from osc4py3.oscmethod import * # does OSCARG_XXX -from rich import print - -if TYPE_CHECKING: - from ..clock import Clock - -__all__ = ("Receiver", "Client", "client", "dirt") - - -def flatten(l): - if isinstance(l, (list, tuple)): - if len(l) > 1: - return [l[0]] + flatten(l[1:]) - else: - return l[0] - else: - return [l] - - -class Receiver: - - """ - Incomplete and temporary implementation of an OSC message receiver. - Will be completed later on when I will have found the best method to - log incoming values. 
- """ - - def __init__( - self, port: int, ip: str = "127.0.0.1", name: str = "receiver", at: int = 0 - ): - """ - Keyword parameters - ip: str -- IP address - port: int -- network port - name: str -- Name attributed to the OSC receiver - """ - self._ip, self._port, self._name = ip, port, name - self._server = osc_udp_server(ip, port, name) - self._watched_values = {} - - def _generic_store(self, address) -> None: - """Generic storage function to attach to a given address""" - - def generic_value_tracker(*args, **kwargs): - """Generic value tracker to be attached to an address""" - self._watched_values[address] = {"args": flatten(args), "kwargs": kwargs} - return (args, kwargs) - - osc_method(address, generic_value_tracker, argscheme=OSCARG_DATA) - - def watch(self, address: str): - """ - Watch the value of a given OSC address. Will be recorded in memory - in the self._watched_values dictionary accessible through the get() - method - """ - print(f"[yellow]Watching address [red]{address}[/red].[/yellow]") - self._generic_store(address) - - def attach(self, address: str, function: Callable, watch: bool = False): - """ - Attach a callback to a given address. You can also toggle the watch - boolean value to tell if the value should be tracked by the receiver. - It allows returning values from the callback to be retrieved later in - through the get(address) method. - """ - print( - f"[yellow]Attaching function [red]{function.__name__}[/red] to address [red]{address}[/red][/yellow]" - ) - osc_method(address, function) - if watch: - self.watch(address) - - def get(self, address: str) -> Union[Any, None]: - """Get a watched value. 
Return None if not found""" - try: - return self._watched_values[address] - except KeyError: - return None - - -class Client: - def __init__( - self, - ip: str = "127.0.0.1", - port: int = 57120, - name: str = "SuperDirt", - ahead_amount: Union[float, int] = 0.03, - at: int = 0, - ): - - """ - Keyword parameters - ip: str -- IP - port: int -- network port - name: str -- Name attributed to the OSC connexion - ahead_amount: Union[float, int] -- (in ms.) send timestamp - in the future, x ms. after current time. - """ - - self._ip, self._port = (ip, port) - self._name, self._ahead_amount = (name, ahead_amount) - self.after: int = at - osc_startup() - self.client = osc_udp_client(address=self._ip, port=self._port, name=self._name) - - @property - def port(self): - return self._port - - @port.setter - def port(self, value): - self._port = value - - @property - def ip(self): - return self._ip - - @ip.setter - def ip(self, value): - self._ip = value - - @property - def ahead_amount(self): - return self._ahead_amount - - @ahead_amount.setter - def ahead_amount(self, value): - self._ahead_amount = value - - def send( - self, clock: "Clock", address: str, message: oscbuildparse.OSCBundle - ) -> None: - async def _waiter(): - await handle - self._send(clock, address, message) - - ticks = clock.get_beat_ticks(self.after, sync=False) - handle = clock.wait_after(n_ticks=ticks) - asyncio.create_task(_waiter(), name="osc-scheduler") - - def _get_clock_information(self, clock: "Clock") -> list: - """Send out everything you can possibly send about the clock""" - return ( - ["/cps", (clock.bpm / 60 / clock.beat_per_bar)], - ["/bpm", clock.bpm], - ["/beat", clock.beat], - ["/bar", clock.bar], - ["/tick", clock.tick], - ["/phase", clock.phase], - ["/accel", clock.accel], - ) - - def _send_clock_information(self, clock: "Clock"): - for element in self._get_clock_information(clock): - self._send(clock=clock, address=element[0], message=[element[1]]) - - def _send(self, clock: "Clock", 
address: str, message): - """Build user-made OSC messages""" - msg = oscbuildparse.OSCMessage(address, None, message) - bun = oscbuildparse.OSCBundle( - oscbuildparse.unixtime2timetag(time() + self._ahead_amount), [msg] - ) - osc_send(bun, self._name) - osc_process() - - def send_timed_message(self, message, clock): - """Build and send OSC bundles""" - message = message + [ - "cps", - (clock.bpm / 60 / clock.beat_per_bar), - "delta", - (clock._get_tick_duration() * 100), - ] - msg = oscbuildparse.OSCMessage("/dirt/play", None, message) - bun = oscbuildparse.OSCBundle( - oscbuildparse.unixtime2timetag(time() + self._ahead_amount), [msg] - ) - osc_send(bun, self._name) - osc_process() - - def kill(self): - """Terminate OSC connexion""" - osc_terminate() - - -client = Client() - - -def dirt(message, clock, ahead_amount: Union[int, float] = 0.03): - """Sending messages to SuperDirt/SuperCollider""" - client.ahead_amount = ahead_amount - client.send_timed_message(message=message, clock=clock) diff --git a/sardine/io/SenderLogic.py b/sardine/io/SenderLogic.py deleted file mode 100644 index c1858d5f..00000000 --- a/sardine/io/SenderLogic.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import List, Tuple, Any -from ..sequences.LexerParser.Chords import Chord -from math import floor - - -def pattern_element(div: int, rate: int, iterator: int, pattern: list) -> int: - """Joseph Enguehard's algorithm for solving iteration speed""" - return floor(iterator * rate / div) % len(pattern) - - -def compose_parametric_patterns( - div: int, - rate: int, - iterator: int, - items: List[Tuple[str, Any]], - cast_to_int: bool = False, - midi_overflow_protection: bool = False, -) -> list: - final_message = [] - - if cast_to_int: - conv_function = int - else: - conv_function = lambda x: x - - for key, value in items: - if value == []: - continue - if isinstance(value, list): - new_value = value[ - pattern_element(iterator=iterator, div=div, rate=rate, pattern=value) - ] - if new_value is 
None: - for decreasing_index in range(iterator, -1, -1): - new_value = value[ - pattern_element( - iterator=decreasing_index, - div=div, - rate=rate, - pattern=value, - ) - ] - if new_value is None: - continue - else: - value = conv_function(new_value) - break - if value is None: - raise ValueError("Pattern does not contain any value") - else: - value = conv_function(new_value) - - # Overflow protection takes place here for MIDI values (0-127) - if midi_overflow_protection: - if key != "delay": - if value > 127: - value = 127 - elif value < 0: - value = 0 - final_message.extend([key, value]) - elif isinstance(value, Chord): - final_message.extend([key, value]) - else: - final_message.extend([key, conv_function(value)]) - - return final_message diff --git a/sardine/io/SuperDirtSender.py b/sardine/io/SuperDirtSender.py deleted file mode 100644 index 15850071..00000000 --- a/sardine/io/SuperDirtSender.py +++ /dev/null @@ -1,210 +0,0 @@ -#!/usr/bin/env python3 -import asyncio -import pprint -import functools -from typing import TYPE_CHECKING, Union -from ..io import dirt -from ..sequences import ListParser -from ..sequences.LexerParser.Chords import Chord -from .SenderLogic import pattern_element, compose_parametric_patterns - -if TYPE_CHECKING: - from ..clock import Clock - - -class SuperDirtSender: - def __init__( - self, - clock: "Clock", - sound: str, - at: Union[float, int] = 0, - nudge: Union[float, int] = 0.0, - **kwargs, - ): - - self.clock = clock - self._general_parser = self.clock.parser - self.sound = self._parse_sound(sound) - self.after: int = at - self._nudge: Union[float, int] = nudge - - # Iterating over kwargs. 
If parameter seems to refer to a - # method (usually dynamic SuperDirt parameters), call it - - self.content = {"orbit": 0} - for k, v in kwargs.items(): - method = getattr(self, k, None) - if callable(method): - method(v) - - def _parse_sound(self, sound_pattern: str): - """Pre-parse sound param during __init__""" - pat = self._general_parser.parse(sound_pattern) - return pat - - def __str__(self): - """String representation of a sender content""" - param_dict = pprint.pformat(self.content) - return f"{self.sound}: {param_dict}" - - def __getattr__(self, name: str): - method = functools.partial(self.addOrChange, name=name) - method.__doc__ = f"Updates the sound's {name} parameter." - return method - - def addOrChange(self, values, name: str): - """Will set a parameter or change it if already in message""" - if isinstance(values, str): - self.content |= {name: self._general_parser.parse(values)} - else: - self.content |= {name: values} - return self - - def schedule(self, message): - """ - Higher logic of the schedule function. Is able to send both monophonic - and polyphonic messages. 
- """ - # Analyse message to find chords lying around - def chords_in_message(message: list) -> bool: - return any(isinstance(x, Chord) for x in message) - - def longest_list_in_message(message: list) -> int: - return max(len(x) if isinstance(x, (Chord, list)) else 1 for x in message) - - if chords_in_message(message): - # We need to compose len(longest_list) messages - longest_list = longest_list_in_message(message) - list_of_messages = [] - for _ in range(0, longest_list): - note_message = [ - x if not isinstance(x, Chord) else x[_ % len(x)] for x in message - ] - list_of_messages.append(note_message) - for message in list_of_messages: - self._schedule(message) - else: - self._schedule(message) - - def _schedule(self, message): - async def _waiter(): - await handle - dirt(message, self.clock, ahead_amount=self._nudge) - - ticks = self.clock.get_beat_ticks(self.after, sync=False) - # Beat synchronization is disabled since `self.after` - # is meant to offset us from the current time - handle = self.clock.wait_after(n_ticks=ticks) - asyncio.create_task(_waiter(), name="superdirt-scheduler") - - def out(self, i: int = 0, div: int = 1, rate: int = 1) -> None: - """ - Prototype for the Sender output. 
- """ - if i % div != 0: - return - i = int(i) - final_message = [] - - def _message_without_iterator(): - """Compose a message if no iterator is given""" - composite_tokens = (list, Chord) - single_tokens = (type(None), str) - - # ================================================================= - # HANDLING THE SOUND PARAMETER - # ================================================================= - - if self.sound == []: - return - - # Handling lists - if isinstance(self.sound, composite_tokens): - first_element = self.sound[0] - if first_element is not None: - final_message.extend(["sound", self.sound[0]]) - else: - return - - # Handling other representations (str, None) - elif isinstance(self.sound, single_tokens): - if self.sound is None: - return - else: - final_message.extend(["sound", self.sound]) - - # ================================================================= - # HANDLING OTHER PARAMETERS - # ================================================================= - - # Handling other non-essential keys - for key, value in self.content.items(): - # We don't care if there is no value, just drop it - if value == []: - continue - if isinstance(value, composite_tokens): - value = value[0] - final_message.extend([key, value]) - - # ================================================================= - # TRIGGER MANAGEMENT - # ================================================================= - - if "trig" not in final_message: - final_message.extend(["trig", 1]) - - trig_value = final_message[final_message.index("trig") + 1] - if trig_value: - return self.schedule(final_message) - - def _message_with_iterator(): - """Compose a message if an iterator is given""" - composite_tokens = (list, Chord) - single_tokens = (type(None), str) - - # ================================================================= - # HANDLING THE SOUND PARAMETER - # ================================================================= - - if self.sound == []: - return - if isinstance(self.sound, 
list): - new_element = self.sound[ - pattern_element(iterator=i, div=div, rate=rate, pattern=self.sound) - ] - if new_element is None: - return - else: - final_message.extend(["sound", new_element]) - else: - if self.sound is None: - return - else: - final_message.extend(["sound", self.sound]) - - # ================================================================= - # HANDLING OTHER PARAMETERS - # ================================================================= - - pattern_result = compose_parametric_patterns( - div=div, rate=rate, iterator=i, items=self.content.items() - ) - final_message.extend(pattern_result) - - # ================================================================= - # TRIGGER MANAGEMENT - # ================================================================= - - # Trig must always be included - if "trig" not in final_message: - final_message.extend(["trig", str(1)]) - - trig_value = final_message[final_message.index("trig") + 1] - if trig_value: - return self.schedule(final_message) - - # Ultimately composing and sending message - if i == 0: - return _message_without_iterator() - else: - return _message_with_iterator() diff --git a/sardine/io/UserConfig.py b/sardine/io/UserConfig.py index 8b47d839..61cd34a4 100644 --- a/sardine/io/UserConfig.py +++ b/sardine/io/UserConfig.py @@ -1,7 +1,8 @@ -from dataclasses import dataclass import json +from dataclasses import dataclass from pathlib import Path from typing import Union + from appdirs import * from rich import print @@ -22,10 +23,11 @@ "bpm": 135, "beats": 4, "debug": False, - "ppqn": 24, - "boot_superdirt": False, + "superdirt_handler": False, + "sardine_boot_file": True, + "boot_supercollider": False, "verbose_superdirt": False, - "active_clock": True, + "link_clock": False, "superdirt_config_path": str(USER_DIR / "default_superdirt.scd"), "user_config_path": str(USER_DIR / "user_configuration.py"), "deferred_scheduling": True, @@ -47,14 +49,15 @@ def _recursive_update(dest: dict, src: dict): 
class Config: midi: Union[str, None] beats: int - ppqn: int bpm: int debug: bool superdirt_config_path: str verbose_superdirt: bool user_config_path: str - boot_superdirt: bool - active_clock: bool + superdirt_handler: bool + boot_supercollider: bool + sardine_boot_file: bool + link_clock: bool deferred_scheduling: bool @classmethod @@ -64,11 +67,12 @@ def from_dict(cls, data: dict) -> "Config": midi=config["midi"], beats=config["beats"], debug=config["debug"], - ppqn=config["ppqn"], bpm=config["bpm"], - boot_superdirt=config["boot_superdirt"], + superdirt_handler=config["superdirt_handler"], + boot_supercollider=config["boot_supercollider"], + sardine_boot_file=config["sardine_boot_file"], verbose_superdirt=config["verbose_superdirt"], - active_clock=config["active_clock"], + link_clock=config["link_clock"], superdirt_config_path=config["superdirt_config_path"], user_config_path=config["user_config_path"], deferred_scheduling=config["deferred_scheduling"], @@ -80,12 +84,13 @@ def to_dict(self) -> dict: "midi": self.midi, "beats": self.beats, "debug": self.debug, - "ppqn": self.ppqn, "bpm": self.bpm, - "boot_superdirt": self.boot_superdirt, + "superdirt_handler": self.superdirt_handler, + "boot_supercollider": self.boot_supercollider, + "sardine_boot_file": self.sardine_boot_file, "verbose_superdirt": self.verbose_superdirt, "superdirt_config_path": self.superdirt_config_path, - "active_clock": self.active_clock, + "link_clock": self.link_clock, "user_config_path": self.user_config_path, "deferred_scheduling": self.deferred_scheduling, } diff --git a/sardine/io/__init__.py b/sardine/io/__init__.py index 18eba3ea..b1b1dee2 100644 --- a/sardine/io/__init__.py +++ b/sardine/io/__init__.py @@ -1,9 +1 @@ -from .MidiIo import * -from .MidiListener import * -from .Osc import * from .UserConfig import * -from .SuperDirtSender import * -from .OSCSender import * -from .MIDISender import * -from .SenderLogic import * -from .MarkdownReader import * diff --git 
a/sardine/legacy/LinkToPy.py b/sardine/legacy/LinkToPy.py deleted file mode 100644 index 500657f9..00000000 --- a/sardine/legacy/LinkToPy.py +++ /dev/null @@ -1,271 +0,0 @@ -from __future__ import print_function -import socket -import edn_format -import os -import numpy as np -import time -import threading -import errno -from socket import error as socket_error -import warnings - -__all__ = ("LinkInterface",) - - -class LinkInterface: - """A fork of Bdyetton simple python client to communicate with carabiner: - https://github.com/bdyetton/LinkToPy. Requires edn_format and adapted for - Sardine.""" - - def __init__( - self, - path_to_carabiner, - tcp_ip="127.0.0.1", - tcp_port=17000, - buffer_size=1024, - callbacks=None, - ): - self._tcp_ip, self._tcp_port = tcp_ip, tcp_port - self._buffer_size = buffer_size - self.peers = 0 - self.quantum_ = 4 - self.phase_ = 0 - self.start_, self.beat_ = [-1] * 2 - self.bpm_ = 120 - - if callbacks is None: - self.callbacks = {} - else: - self.callbacks = callbacks - - self.terminated = threading.Event() - self.start_carabiner_and_open_socket(path_to_carabiner) - - thread = threading.Thread(target=self._listener) - thread.daemon = True - thread.start() - print("Link Interface Started") - - def decode_edn_msg(self, msg): - """Decodes a TCP message from Carabiner to python dictionary""" - msg = msg.decode() - msg_type = msg[: msg.index(" ")] - try: - striped_msg = msg[msg.index("{") :] - decoded_msg = edn_format.loads(striped_msg, write_ply_tables=False) - except: - decoded_msg = "" - - # Because the edn_format package does not return normal dam dicts (or string keywords). What dicts. 
- if type(decoded_msg) is edn_format.immutable_dict.ImmutableDict: - decoded_msg = { - str(key).strip(":"): value for key, value in decoded_msg.dict.items() - } - - return msg_type, decoded_msg - - def status(self, callback=None): - """Wrapper for Status""" - try: - self.s.send(b"status\n") - except BrokenPipeError: - return - if callback is not None: - self.callbacks["status"] = callback - - def set_bpm(self, bpm, callback=None): - """Wrapper for bpm""" - msg = "bpm " + str(bpm) + "\n" - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["bpm"] = callback - - def beat_at_time(self, time_in_ms, quantum=8, callback=None): - """Wrapper for Beat At Time""" - msg = "beat-at-time " + str(time_in_ms) + " " + str(quantum) + "\n" - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["beat-at-time"] = callback - - return - - def time_at_beat(self, beat, quantum=8, callback=None): - """Wrapper for Time At Beat""" - msg = "time-at-beat " + str(beat) + " " + str(quantum) + "\n" - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["time-at-beat"] = callback - - def phase_at_time(self, time_in_ms, quantum=8, callback=None): - """Wrapper for Phase At Time""" - msg = "phase-at-time " + str(time_in_ms) + " " + str(quantum) + "\n" - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["phase-at-time"] = callback - - def force_beat_at_time(self, beat, time_in_ms, quantum=8, callback=None): - """Wrapper for Beat At Time""" - msg = ( - "force-beat-at-time " - + str(beat) - + " " - + str(time_in_ms) - + " " - + str(quantum) - + "\n" - ) - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["force-beat-at-time"] = callback - - def request_beat_at_time(self, beat, time_in_ms, quantum=8, 
callback=None): - msg = ( - "request-beat-at-time " - + str(beat) - + " " - + str(time_in_ms) - + " " - + str(quantum) - + "\n" - ) - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["request-beat-at-time"] = callback - - def enable_start_stop_sync(self, callback=None): - try: - self.s.send(b"enable-start-stop-sync\n") - except BrokenPipeError: - return - if callback is not None: - self.callbacks["enable-start-stop-sync"] = callback - - def disable_start_stop_sync(self, callback=None): - try: - self.s.send(b"disable-start-stop-sync\n") - except BrokenPipeError: - return - if callback is not None: - self.callbacks["disable-start-stop-sync"] = callback - - def start_playing(self, time_in_ms, callback=None): - msg = "start-playing " + str(time_in_ms) + "\n" - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["start-playing"] = callback - - def stop_playing(self, time_in_ms, callback=None): - msg = "stop-playing " + str(time_in_ms) + "\n" - try: - self.s.send(msg.encode()) - except BrokenPipeError: - return - if callback is not None: - self.callbacks["stop-playing"] = callback - - def now(self): - """Returns the monotonic system time as used by Link. 
This is in ms, and is the same format as 'start' - See the Carabiner note on Clocks for more information""" - return int(time.monotonic() * 1000 * 1000) - - def start_carabiner(self, path_to_car): - try: - if os.access(path_to_car, os.X_OK): - print("Starting Carabiner: %s" % path_to_car) - pid = os.system(path_to_car + " >car_logs.log") - self.terminated.clear() - - while True: - time.sleep(0.1) - try: - os.kill(pid, 0) - except OSError: - break - except OSError as e: - print("Couldn't bind with Carabiner: {e}") - - print("Carabiner terminated") - self.terminated.set() - - def start_carabiner_and_open_socket(self, carabiner_path): - not_connected, not_connected_ticker = True, 0 - while not_connected: - try: - self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.s.connect((self._tcp_ip, self._tcp_port)) - not_connected = False - except socket_error as serr: - if serr.errno != errno.ECONNREFUSED: - # Not the error we are looking for, re-raise - raise serr - not_connected_ticker += 1 - - if not_connected_ticker == 10: - thread = threading.Thread( - target=self.start_carabiner, args=[carabiner_path] - ) - thread.start() - - if not_connected_ticker > 30: - warnings.warn( - "Socket Connection Timeout, Carabiner could not be started" - ) - break - print(".", end="", flush=True) - time.sleep(0.1) - - def _listener(self): - while not self.terminated.is_set(): - try: - msg = self.s.recv(self._buffer_size) - except BrokenPipeError: - break - - if msg: - msg_type, msg_data = self.decode_edn_msg(msg) - else: - msg_type = "" - - if msg_type == "beat-at-time": - self.quantum_ = msg_data["quantum"] - - if msg_type == "phase-at-time": - self.phase_ = msg_data["phase"] - self.quantum_ = msg_data["quantum"] - - if msg_type == "status": - self.bpm_ = msg_data["bpm"] - self.beat_ = msg_data["beat"] - self.start_ = msg_data["start"] - - if msg_type == "time_at_beat": - self.next_beat_ = (msg_data["beat"], msg_data["when"]) - - if msg_type in self.callbacks: - 
self.callbacks[msg_type](msg_data) - - self.terminated.set() - - def __del__(self): - self.s.close() diff --git a/sardine/legacy/OldClock.py b/sardine/legacy/OldClock.py deleted file mode 100644 index 52658ddb..00000000 --- a/sardine/legacy/OldClock.py +++ /dev/null @@ -1,380 +0,0 @@ -import asyncio -import itertools -import mido -import time -from rich import print -from typing import Union, Callable -import traceback -import types -import functools -import copy -from .AsyncRunner import AsyncRunner -from ..io.MidiIo import MIDIIo - -atask = asyncio.create_task -sleep = asyncio.sleep - - -class Clock: - - """ - Naive MIDI Clock and scheduler implementation. This class - is the core of Sardine. It generates an asynchronous MIDI - clock and will schedule functions on it accordingly. - - Keyword arguments: - port_name: str -- Exact String for the MIDIOut Port. - bpm: Union[int, float] -- Clock Tempo in beats per minute - beats_per_bar: int -- Number of beats in a given bar - """ - - def __init__( - self, - port_name: Union[str, None] = None, - bpm: Union[float, int] = 120, - beat_per_bar: int = 4, - ): - - self._midi = MIDIIo(port_name=port_name) - - # Clock maintenance related - self.child = {} - - self.running = False - self._debug = False - # Timing related - self._bpm = bpm - self.initial_time = 0 - self.delta = 0 - self.beat = -1 - self.ppqn = 48 - self._phase_gen = itertools.cycle(range(1, self.ppqn + 1)) - self.phase = 0 - self.beat_per_bar = beat_per_bar - self._current_beat_gen = itertools.cycle(range(1, self.beat_per_bar + 1)) - self.current_beat = 0 - self.elapsed_bars = 0 - self.tick_duration = self._get_tick_duration() - self.tick_time = 0 - - # ---------------------------------------------------------------------- # - # Setters and getters - - def get_bpm(self): - """BPM Getter""" - return self._bpm - - def set_bpm(self, new_bpm: int) -> None: - """BPM Setter""" - if 1 < new_bpm < 800: - self._bpm = new_bpm - self.tick_duration = 
self._get_tick_duration() - - def get_debug(self): - """Debug getter""" - return self._debug - - def set_debug(self, boolean: bool): - """Debug setter""" - self._debug = boolean - - bpm = property(get_bpm, set_bpm) - debug = property(get_debug, set_debug) - - # ---------------------------------------------------------------------- # - # Private methods - - def _get_tick_duration(self): - return ((60 / self.bpm) / self.ppqn) - self.delta - - def _reset_internal_clock_state(self): - """Reset internal clock state with MIDI message""" - self.beat = -1 - self._phase_gen, self.phase = itertools.cycle(range(1, self.ppqn + 1)), 0 - self._current_beat_gen, self.current_beat = ( - itertools.cycle(range(1, self.beat_per_bar)), - 0, - ) - self.elapsed_bars = 0 - self.tick_duration = (self.bpm / 60) / self.beat_per_bar - - def _update_phase(self) -> None: - """Update the current phase in MIDI Clock""" - self.phase = next(self._phase_gen) - - def _update_current_beat(self) -> None: - """Update the current beat in bar""" - self.current_beat = next(self._current_beat_gen) - - # ---------------------------------------------------------------------- # - # Scheduler methods - - def schedule(self, function): - - name = function.__name__ - keys = self.child.keys() - - if name in keys: - self.child[name].function = function - atask(self.schedule_runner(self.child[name].function, init=False)) - else: - self.child[name] = AsyncRunner( - function=function, - function_save=copy_func(function), - last_valid_function=None, - ) - - atask(self.schedule_runner(function=self.child[name].function, init=True)) - - async def schedule_runner(self, function, init=False): - """New version of the inner mechanism""" - failed = False - name = function.__name__ - cur_bar = self.elapsed_bars - - def grab_arguments_from_coroutine(cr): - """Grab arguments from coroutine frame""" - arguments = cr.cr_frame - arguments = arguments.f_locals - return arguments - - arguments = 
grab_arguments_from_coroutine(function) - try: - delay = arguments["d"] - except KeyError: - delay = 1 - delay = self.ppqn * delay - - # Waiting time - if init: - print(f"[Init {name}]") - while self.phase != 1 and self.elapsed_bars != cur_bar + 1: - await sleep(self._get_tick_duration()) - else: - # Busy waiting until execution time - now = self.get_tick_time() - while self.tick_time < now + delay: - # You might increase the resolution even more - await sleep(self._get_tick_duration() / self.ppqn) - - # Error catching here! (Working!) - if name in self.child.keys(): - try: - await self.child[name].function - # This is where we need a fresh coroutine - self.child[name].last_valid_function = self.child[name].function_save - except Exception as e: - print(f"Caught exception {e}") - failed = True - - if failed: - try: - # starting the failsafe - await self.child[name].last_valid_function - except Exception as e: - print(f"Le bide est total!: {e}") - - async def _schedule(self, function, init=False): - """Inner scheduling""" - name = function.__name__ - cur_bar = self.elapsed_bars - - def grab_arguments_from_coroutine(cr): - """Grab arguments from coroutine frame""" - arguments = cr.cr_frame - arguments = arguments.f_locals - return arguments - - arguments = grab_arguments_from_coroutine(function) - try: - delay = arguments["d"] - except KeyError: - delay = 1 - - # Transform delay into multiple or division of ppqn - delay = self.ppqn * delay - - if init: - print(f"[Init {name}]") - while self.phase != 1 and self.elapsed_bars != cur_bar + 1: - await sleep(self._get_tick_duration()) - else: - # Busy waiting until execution time - now = self.get_tick_time() - while self.tick_time < now + delay: - # You might increase the resolution even more - await sleep(self._get_tick_duration() / self.ppqn) - - # Execution time - # Trying something here with safe! 
- if name in self.child.keys(): - atask(function) - - # def _auto_schedule(self, function): - # """ Loop mechanism """ - - # # If the code reaches this point, first loop was succesful. It's time - # # to register a new version of last_valid_function. However, I need - # # to find a way to catch exceptions right here! Only Task exceptions - # # will show me if a task failed for some reason. - # name = function.__name__ - - # if name in self.child.keys(): - # self.child[name].function = function - # self.child[name].tasks.append( - # asyncio.create_task(self._schedule( - # function=self.child[name].function))) - - def __rshift__(self, function): - """Alias to _auto_schedule""" - self._auto_schedule(function=function) - - def __lshift__(self, function): - """Alias to remove""" - self.remove(function=function) - - # ---------------------------------------------------------------------- # - # Public methods - - def remove(self, function): - """Remove a function from the scheduler""" - - if function.__name__ in self.child.keys(): - del self.child[function.__name__] - - def get_phase(self): - return self.phase - - def print_children(self): - """Print all children on clock""" - [print(child) for child in self.child] - - def ticks_to_next_bar(self) -> None: - """How many ticks until next bar?""" - return (self.ppqn - self.phase - 1) * self._get_tick_duration() - - async def play_note( - self, - note: int = 60, - channel: int = 0, - velocity: int = 127, - duration: Union[float, int] = 1, - ) -> None: - - """ - OBSOLETE // Was used to test things but should be removed. - Dumb method that will play a note for a given duration. 
- - Keyword arguments: - note: int -- the MIDI note to be played (default 1.0) - duration: Union [int, float] -- MIDI tick time multiplier (default 1.0) - channel: int -- MIDI Channel (default 0) - velocity: int -- MIDI velocity (default 127) - """ - - async def send_something(message): - """inner non blocking function""" - asyncio.create_task(self._midi.send_async(message)) - - note_on = mido.Message("note_on", note=note, channel=channel, velocity=velocity) - note_off = mido.Message( - "note_off", note=note, channel=channel, velocity=velocity - ) - await send_something(note_on) - await asyncio.sleep(self.tick_duration * duration) - await send_something(note_off) - - async def run_clock_initial(self): - """The MIDIClock needs to start""" - self.run_clock() - - def send_stop(self): - """Stop the running clock and send stop message""" - self.running = False - self._midi.send_stop() - - def send_reset(self) -> None: - """MIDI Reset message""" - self.send_stop() - self._midi.send(mido.Message("reset")) - self._reset_internal_clock_state() - - async def send_start(self, initial: bool = False) -> None: - """MIDI Start message""" - self._midi.send(mido.Message("start")) - self.running = True - if initial: - asyncio.create_task(self.run_clock()) - - def next_beat_absolute(self): - """Return time between now and next beat in absolute time""" - return self.tick_duration * (self.ppqn - self.phase) - - def log(self) -> None: - - """ - Pretty print information about Clock timing on the console. - Used for debugging purposes. Not to be used when playing, - can be very verbose. Will overflow the console in no-time. - """ - - color = "[bold red]" if self.phase == 1 else "[bold yellow]" - first = ( - color + f"BPM: {self.bpm}, PHASE: {self.phase:02}, DELTA: {self.delta:2f}" - ) - second = ( - color + f" || [{self.tick_time}] {self.current_beat}/{self.beat_per_bar}" - ) - print(first + second) - - async def run_clock(self): - - """ - Main Method for the MIDI Clock. 
Full of errors and things that - msut be fixed. Drift can happen, and it might need a full rewrite. - - Keyword arguments: - debug: bool -- print debug messages on stdout. - """ - - async def _clock_update(): - """Things the clock should do every tick""" - - self.tick_duration = self._get_tick_duration() - self.delta - - begin = time.perf_counter() - self.delta = 0 - - await asyncio.sleep(self.tick_duration) - asyncio.create_task(self._midi.send_clock_async()) - - # Time grains - self.tick_time += 1 - self._update_phase() - - # XPPQN = 1 Beat - if self.phase == 1: - self._update_current_beat() - if self.phase == 1 and self.current_beat == 1: - self.elapsed_bars += 1 - - # End of it - end = time.perf_counter() - self.delta = end - begin - if self._debug: - self.log() - - while self.running: - await _clock_update() - - def get_tick_time(self): - """Indirection to get tick time""" - return self.tick_time - - def ramp(self, min: int, max: int): - """Generate a ramp between min and max using phase""" - return self.phase % (max - min + 1) + min - - def iramp(self, min: int, max: int): - """Generate an inverted ramp between min and max using phase""" - return self.ppqn - self.phase % (max - min + 1) + min diff --git a/sardine/legacy/PatternParser.py b/sardine/legacy/PatternParser.py deleted file mode 100644 index b236e8b6..00000000 --- a/sardine/legacy/PatternParser.py +++ /dev/null @@ -1,144 +0,0 @@ -import itertools -import random -import re -from rich import print - - -class PatternParserOld: - """Mininotation for sequences""" - - OSC_ADDRESS_REGEX = re.compile( - r""" - (?P[/\w|]+) - (?: \?(?P\d*) )? - (?: !(?P\d+) )? - """, - re.VERBOSE, - ) - - SOUND_REGEX = re.compile( - # (?P[\w|]+) - r""" - (?P[\w?:\d|]+) - (?: \?(?P\d*) )? - (?: !(?P\d+) )? - """, - re.VERBOSE, - ) - - NUMBER_REGEX = re.compile( - r""" - (?P([-+]?[\d*\.\d+|]+)) - (?: \?(?P\d*) )? - (?: !(?P\d+) )? - (?: :(?P([-+]?[\d*\.\d+|]+) ))? 
- """, - re.VERBOSE, - ) - - def __init__(self, pattern: str, type: str): - - if type == "sound": - self.pattern = self.parse_sound_string(pattern) - elif type == "number": - self.pattern = self.parse_number_string(pattern) - elif type == "address": - self.pattern = self.parse_osc_address(pattern) - else: - raise TypeError("Pattern must be of type 'sound' or 'number'") - - def parse_sound_string(self, pattern: str) -> list[str]: - """Parse pattern string using the sound REGEX""" - rule = self.SOUND_REGEX - - def _expand_sound(pattern: str) -> list[str]: - # Split the incoming string - words, tokens = pattern.split(), [] - # Tokenize and parse - for w in words: - # Try to match a symbol, return None if not in spec - m = rule.fullmatch(w) - if m is None: - raise ValueError(f"unknown sound definition: {w!r}") - sound = [m["sound"]] - if "|" in m["sound"]: - sound = [random.choice(m["sound"].split("|"))] - else: - sound = [m["sound"]] - if m["chance"] is not None: - chance = int(m["chance"] or 50) - if random.randrange(100) >= chance: - continue - if m["repeat"] is not None: - sound *= int(m["repeat"]) - tokens.extend(sound) - return tokens - - parsed_expression = _expand_sound(pattern) - return parsed_expression - - def parse_osc_address(self, pattern: str) -> list[str]: - """Parse pattern string using the sound REGEX""" - rule = self.OSC_ADDRESS_REGEX - - def _expand_sound(pattern: str) -> list[str]: - # Split the incoming string - words, tokens = pattern.split(), [] - # Tokenize and parse - for w in words: - # Try to match a symbol, return None if not in spec - m = rule.fullmatch(w) - if m is None: - raise ValueError(f"unknown sound definition: {w!r}") - sound = [m["sound"]] - if "|" in m["sound"]: - sound = [random.choice(m["sound"].split("|"))] - else: - sound = [m["sound"]] - if m["chance"] is not None: - chance = int(m["chance"] or 50) - if random.randrange(100) >= chance: - continue - if m["repeat"] is not None: - sound *= int(m["repeat"]) - 
tokens.extend(sound) - return tokens - - parsed_expression = _expand_sound(pattern) - return parsed_expression - - def parse_number_string(self, pattern: str) -> list[str]: - """Parse number string using the number REGEX""" - rule = self.NUMBER_REGEX - - def _expand_number(s: str) -> list[str]: - # Split the incoming string - words, tokens = s.split(), [] - # Tokenize and parse - for w in words: - # Try to match a symbol, return None if not in spec - - m = rule.fullmatch(w) - if m is None: - raise ValueError(f"unknown number definition: {w!r}") - number = [m["number"]] - if m["chance"] is not None: - chance = int(m["chance"] or 50) - if random.randrange(100) >= chance: - continue - if m["repeat"] is not None: - number *= int(m["repeat"]) - if m["range"] is not None: - integer_test = str(number[0]).isdigit() - number = [random.uniform(float(number[0]), float(m["range"]))] - if integer_test: - number = [int(number[0])] - tokens.extend(number) - return tokens - - parsed_expression = _expand_number(pattern) - return parsed_expression - - def get_pattern(self) -> itertools.cycle: - """Get pattern as iterator""" - return itertools.cycle(self.pattern) diff --git a/sardine/run.py b/sardine/run.py new file mode 100644 index 00000000..a832e6f6 --- /dev/null +++ b/sardine/run.py @@ -0,0 +1,361 @@ +import importlib +import sys +from pathlib import Path +from string import ascii_lowercase, ascii_uppercase +from typing import Any, Callable, Optional, ParamSpec, TypeVar, Union, overload + +from rich import print + +from . 
import * +from .io.UserConfig import read_user_configuration +from .superdirt import SuperDirtProcess +from .utils import config_line_printer, get_snap_deadline, sardine_intro + +P = ParamSpec("P") # NOTE: name is similar to surfboards +T = TypeVar("T") + +# Reading user configuration (taken from sardine-config) +config = read_user_configuration() +clock = LinkClock if config.link_clock else InternalClock + +# Printing banner and some infos about setup/config +print(sardine_intro) +print(config_line_printer(config)) + +# Load user config +if Path(f"{config.user_config_path}").is_file(): + spec = importlib.util.spec_from_file_location( + "user_configuration", config.user_config_path + ) + module = importlib.util.module_from_spec(spec) + sys.modules[spec.name] = module + spec.loader.exec_module(module) + from user_configuration import * +else: + print(f"[red]No user provided configuration file found...") + +# Initialisation of the FishBowl (the environment holding everything together) +bowl = FishBowl( + clock=clock(tempo=config.bpm, bpb=config.beats), +) + +# Opening SuperColliderXSuperDirt subprocess. Dissociated from the SuperDirt handlers. 
+config = read_user_configuration() +if config.boot_supercollider: + try: + SC = SuperDirtProcess( + startup_file=( + config.superdirt_config_path if config.sardine_boot_file else None + ), + verbose=config.verbose_superdirt, + ) + except OSError as Error: + print(f"[red]SuperCollider could not be found: {Error}![/red]") + +# Basic handlers initialization + +# MIDI Handler: matching with the MIDI port defined in the configuration file +midi = MidiHandler(port_name=str(config.midi)) +bowl.add_handler(midi) + +# OSC Loop: handles processing OSC messages +osc_loop_obj = OSCLoop() + +# # OSC Handler: dummy OSC handler, mostly used for test purposes +# my_osc_connexion = OSCHandler( +# ip="127.0.0.1", +# port=12345, +# name="Custom OSC Connexion", +# ahead_amount=0.0, +# loop=my_osc_loop, +# ) +# Ocustom = my_osc_connexion.send + +# # OSC Listener Handler: dummy OSCIn handler, used for test purposes +# my_osc_listener = OSCInHandler( +# ip="127.0.0.1", port=33333, name="OSC-In test", loop=my_osc_loop +# ) + +# SuperDirt Handler: conditional +if config.superdirt_handler: + dirt = SuperDirtHandler(loop=osc_loop_obj) + +# NOTE: always keep this loop running for user-made OSC handlers +bowl.add_handler(osc_loop_obj) + +# Adding Players +player_names = ["P" + l for l in ascii_lowercase + ascii_uppercase] +for player in player_names: + p = Player(name=player) + globals()[player] = p + bowl.add_handler(p) + + +@overload +def swim( + func: Union[Callable[P, T], AsyncRunner], + /, + # NOTE: AsyncRunner doesn't support generic args/kwargs + *args: P.args, + snap: Optional[Union[float, int]] = 0, + **kwargs: P.kwargs, +) -> AsyncRunner: + ... + + +@overload +def swim( + func: None, + /, + *args: P.args, + snap: Optional[Union[float, int]] = 0, + **kwargs: P.kwargs, +) -> Callable[[Callable[P, T]], AsyncRunner]: + ... 
+ + +# pylint: disable=keyword-arg-before-vararg # signature is valid +def swim(func=None, /, *args, snap=0, **kwargs): + """ + Swimming decorator: push a function to the scheduler. The function will be + declared and followed by the scheduler system to recurse in time if needed. + + Args: + func (Optional[Union[Callable[P, T], AsyncRunner]]): + The function to be scheduled. If this is an AsyncRunner, + the current state is simply updated with new arguments. + *args: Positional arguments to be passed to `func.` + snap (Optional[Union[float, int]]): + If set to a numeric value, the new function will be + deferred until the next bar + `snap` beats arrives. + If None, the function is immediately pushed and will + run on its next interval. + If `func` is an AsyncRunner, this parameter has no effect. + **kwargs: Keyword arguments to be passed to `func.` + """ + + def decorator(func: Union[Callable, AsyncRunner]) -> AsyncRunner: + if isinstance(func, AsyncRunner): + func.update_state(*args, **kwargs) + bowl.scheduler.start_runner(func) + return func + + runner = bowl.scheduler.get_runner(func.__name__) + if runner is None: + runner = AsyncRunner(func.__name__) + + # Runners normally allow the same functions to appear in the stack, + # but we will treat repeat functions as just reloading the runner + if runner.states and runner.states[-1].func is func: + again(runner) + bowl.scheduler.start_runner(runner) + return runner + elif snap is not None: + deadline = get_snap_deadline(bowl.clock, snap) + runner.push_deferred(deadline, func, *args, **kwargs) + else: + runner.push(func, *args, **kwargs) + + # Intentionally avoid interval correction so + # the user doesn't accidentally nudge the runner + runner.swim() + runner.reload() + + bowl.scheduler.start_runner(runner) + return runner + + if func is not None: + return decorator(func) + return decorator + + +def again(runner: AsyncRunner, *args, **kwargs): + """ + Keep a runner swimming. 
User functions should continuously call this + at the end of their function until they want the function to stop. + """ + runner.update_state(*args, **kwargs) + runner.swim() + # If this is manually called we should wake up the runner sooner + runner.reload() + + +def die(func: Union[Callable, AsyncRunner]) -> AsyncRunner: + """ + Swimming decorator: remove a function from the scheduler. The function + will not be called again and will likely stop recursing in time. + """ + if isinstance(func, AsyncRunner): + bowl.scheduler.stop_runner(func) + return func + + runner = bowl.scheduler.get_runner(func.__name__) + if runner is not None: + bowl.scheduler.stop_runner(runner) + else: + runner = AsyncRunner(func.__name__) + runner.push(func) + return runner + + +def sleep(n_beats: Union[int, float]): + """Artificially sleep in the current function for `n_beats`. + + Example usage: :: + + @swim + def func(p=4): + sleep(3) + for _ in range(3): + S('909').out() + sleep(1/2) + again(func) + + This should *only* be called inside swimming functions. + Unusual behaviour may occur if sleeping is done globally. + + Using in asynchronous functions + ------------------------------- + + This can be used in `async def` functions and does *not* need to be awaited. + + Sounds scheduled in asynchronous functions will be influenced by + real time passing. For example, if you sleep for 500ms (based on tempo) + and await a function that takes 100ms to complete, any sounds sent + afterwards will occur 600ms from when the function was called. + + :: + + @swim + async def func(p=4): + print(bowl.clock.time) # 0.0s + + sleep(1) # virtual +500ms (assuming bowl.clock.tempo = 120) + await abc() # real +100ms + + S('bd').out() # occurs 500ms from now + print(bowl.clock.time) # 0.6s + again(func) + + Technical Details + ----------------- + + Unlike `time.sleep(n)`, this function does not actually block + the function from running. 
Instead, it temporarily affects the + value of `BaseClock.time` and extends the perceived time of methods + using that property, like `SleepHandler.wait_after()` + and `BaseClock.get_beat_time()`. + + In essence, this maintains the precision of sound scheduling + without requiring the use of declarative syntax like + `S('909', at=1/2).out()`. + + """ + duration = bowl.clock.get_beat_time(n_beats, sync=False) + bowl.time.shift += duration + + + def silence(*runners: AsyncRunner) -> None: + """ + Silence is capable of stopping one or all currently running swimming functions. The + function will also trigger a general MIDI note_off event (all channels, all notes). + This function will only kill events on the Sardine side. For a function capable of + killing synthesizers running on SuperCollider, try the more potent 'panic' function. + """ + if len(runners) == 0: + midi.all_notes_off() + bowl.scheduler.reset() + return + + for run in runners: + bowl.scheduler.stop_runner(run) + + + def panic(*runners: AsyncRunner) -> None: + """ + If SuperCollider/SuperDirt is booted, panic acts as a more powerful alternative to + silence() capable of killing synths on-the-fly. Use as a last resort if you are + losing control of the system. + """ + silence(*runners) + if config.superdirt_handler: + D("superpanic") + + + def Pat(pattern: str, i: int = 0, div: int = 1, rate: int = 1) -> Any: + """ + General purpose pattern interface. This function can be used to summon the global + parser stored in the fish_bowl. It is generally used to pattern outside of the + handler/sender system, if you are playing with custom libraries, imported code or + if you want to take the best of the patterning system without having to deal with + all the built-in I/O. + + Args: + pattern (str): A pattern to be parsed + i (int, optional): Index for iterators. Defaults to 0. 
+ + Returns: + int: The ith element from the resulting pattern + """ + result = bowl.parser.parse(pattern) + return Sender.pattern_element(result, i, div, rate) + + +class Delay: + """ + Delay is a compound statement providing an alternative syntax to the overridden + sleep() method. It implements the bare minimum to reproduce sleep behavior using + extra indentation for marking visually where sleep takes effect. + """ + + def __init__(self, duration: Union[int, float] = 1, delayFirst: bool = True): + self.duration = duration + self.delayFirst = delayFirst + + def __call__(self, duration=1, delayFirst=False): + self.duration = duration + self.delayFirst = delayFirst + return self + + def __enter__(self): + if self.delayFirst: + sleep(self.duration) + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self.delayFirst: + sleep(self.duration) + + +# Aliases! + +clock = bowl.clock + +I, V = bowl.iterators, bowl.variables # Iterators and Variables from env +P = Pat # Generic pattern interface +N = midi.send # For sending MIDI Notes +PC = midi.send_program # For MIDI Program changes +CC = midi.send_control # For MIDI Control Change messages +play = Player.play + + +def n(*args, **kwargs): + return play(midi, midi.send, *args, **kwargs) + + +def cc(*args, **kwargs): + return play(midi, midi.send_control, *args, **kwargs) + + +def pc(*args, **kwargs): + return play(midi, midi.send_program, *args, **kwargs) + + +if config.superdirt_handler: + D = dirt.send + + def d(*args, **kwargs): + return play(dirt, dirt.send, *args, **kwargs) + + +# Clock start +bowl.start() diff --git a/sardine/scheduler/__init__.py b/sardine/scheduler/__init__.py new file mode 100644 index 00000000..373a5c2a --- /dev/null +++ b/sardine/scheduler/__init__.py @@ -0,0 +1,2 @@ +from .async_runner import * +from .scheduler import * diff --git a/sardine/scheduler/async_runner.py b/sardine/scheduler/async_runner.py new file mode 100644 index 00000000..87be21c1 --- /dev/null +++ 
b/sardine/scheduler/async_runner.py @@ -0,0 +1,662 @@ +import asyncio +import heapq +import inspect +import traceback +from collections import deque +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, MutableSequence, NamedTuple, Optional, Union + +from rich import print +from rich.panel import Panel + +from ..base import BaseClock +from ..clock import Time +from ..utils import MISSING +from .constants import MaybeCoroFunc +from .errors import * + +if TYPE_CHECKING: + from ..fish_bowl import FishBowl + from .scheduler import Scheduler + +__all__ = ("AsyncRunner", "FunctionState") + + +def print_panel(text: str) -> None: + """ + Print swimming function event inside a Rich based Panel. + The box is automatically resized to fit text length. + """ + print("\n", Panel.fit(text), end="") + + +def _assert_function_signature(sig: inspect.Signature, args, kwargs): + if args: + message = "Positional arguments cannot be used in scheduling" + if missing := _missing_kwargs(sig, args, kwargs): + message += "; perhaps you meant `{}`?".format( + ", ".join(f"{k}={v!r}" for k, v in missing.items()) + ) + raise BadArgumentError(message) + + +def _discard_kwargs(sig: inspect.Signature, kwargs: dict[str, Any]) -> dict[str, Any]: + """Discards any kwargs not present in the given signature.""" + pass_through = kwargs.copy() + + for param in sig.parameters.values(): + value = kwargs.get(param.name, MISSING) + if value is not MISSING: + pass_through[param.name] = value + + return pass_through + + +def _extract_new_period( + sig: inspect.Signature, kwargs: dict[str, Any] +) -> Union[float, int]: + period = kwargs.get("p") + if period is None: + param = sig.parameters.get("p") + period = getattr(param, "default", 1) + + if not isinstance(period, (float, int)): + raise BadPeriodError(f"Period must be a float or integer, not {period!r}") + elif period <= 0: + raise BadPeriodError(f"Period must be >0, not {period}") + + return period + + +def _missing_kwargs( + sig: 
inspect.Signature, args: tuple[Any], kwargs: dict[str, Any] +) -> dict[str, Any]: + required = [] + defaulted = [] + for param in sig.parameters.values(): + if param.kind in ( + param.POSITIONAL_ONLY, + param.VAR_POSITIONAL, + param.VAR_KEYWORD, + ): + continue + elif param.name in kwargs: + continue + elif param.default is param.empty: + required.append(param.name) + else: + defaulted.append(param.name) + + guessed_mapping = dict(zip(required + defaulted, args)) + return guessed_mapping + + +async def _maybe_coro(func, *args, **kwargs): + if inspect.iscoroutinefunction(func): + return await func(*args, **kwargs) + return func(*args, **kwargs) + + +@dataclass +class FunctionState: + func: "MaybeCoroFunc" + args: tuple + kwargs: dict + + +class DeferredState(NamedTuple): + deadline: Union[float, int] + index: int + state: FunctionState + + +class AsyncRunner: + """Handles calling synchronizing and running a function in + the background, with support for run-time function patching. + + Runners should only be started from the `Scheduler.start_runner()` method. + + The `Scheduler.deferred` attribute is used to control if AsyncRunner + runs with an implicit time shift when calling its function or not. + This helps improve sound synchronization by giving the function + more time to execute. For example, assuming bpm = 120, `deferred=False` + would expect its function to complete instantaneously, + whereas `deferred=True` would allow a function with `p=1` + to finish execution within 500ms (1 beat) instead. + + In either case, if the function takes too long to execute, it will miss + its scheduling deadline and cause an unexpected gap between function calls. + Functions must complete within the time span to avoid this issue. 
+ """ + + MAX_FUNCTION_STATES = 3 + + name: str + """Uniquely identifies a runner when it is added to a scheduler.""" + + scheduler: "Optional[Scheduler]" + """The scheduler this runner was added to.""" + states: MutableSequence[FunctionState] + """ + The function stack, used for auto-restoring functions. + + This is implemented with a deque to ensure a limit on how many functions + are stored in the cache. + """ + deferred_states: list[DeferredState] + """ + A heap queue storing (time, index, state) pairs. + + Unlike regular `states`, these states are left in the background until + their time arrives, at which point they are moved to the `states` sequence + and will take over the next iteration. + """ + interval_shift: float + """ + The amount of time to offset the runner's interval. + + An interval defines the amount of time between each execution + of the current function. For example, a clock with a beat duration + of 0.5s and a period of 2 beats means each interval is 1 second. + + Through interval shifting, a function can switch between different + periods/tempos and then compensate for the clock's current time to + avoid the next immediate beat being shorter than expected. + + Initially, functions have an interval shift of 0. The runner + will automatically change its interval shift when the function + schedules itself with a new period or a change in the clock's beat + duration occurs. This can lead to functions with the same period + running at different phases. To synchronize these functions together, + their interval shifts should be set to the same value (usually 0). + """ + snap: Optional[Union[float, int]] + """ + The absolute time that the next interval should start at. + + Setting this attribute will take priority over the regular interval + on the next iteration and cause the runner to wait until the snap + deadline has arrived. 
+ + The `delay_interval()` method combines this with interval shifting + to properly delay a runner and its interval until the given deadline. + + Once this time has been passed and the next iteration was run, + this attribute will be reset to `None`. + + Note that deferred states will take priority over this, and in fact even + replace the snap, if one or more of those states specify a deadline earlier + than the current snap's deadline. + """ + + _swimming: bool + _stop: bool + _task: Optional[asyncio.Task] + _reload_event: asyncio.Event + _has_reverted: bool + + _deferred_state_index: int + + _can_correct_interval: bool + _delta: float + _expected_time: float + _last_interval: float + _last_state: Optional[FunctionState] + _sleep_drift: float + + def __init__(self, name: str): + self.name = name + self.scheduler = None + self.states = deque(maxlen=self.MAX_FUNCTION_STATES) + self.deferred_states = [] + self.interval_shift = 0.0 + self.snap = None + + self._swimming = False + self._stop = False + self._task = None + self._reload_event = asyncio.Event() + self._has_reverted = False + + self._deferred_state_index = 0 + + self._can_correct_interval = False + self._delta = 0.0 + self._expected_time = 0.0 + self._last_interval = 0.0 + self._last_state = None + self._sleep_drift = 0.0 + + def __repr__(self): + cls_name = type(self).__name__ + status = "running" if self.is_running() else "stopped" + attrs = " ".join( + f"{attr}={getattr(self, attr)!r}" + for attr in ( + "name", + "scheduler", + ) + ) + return f"<{cls_name} {status} {attrs}>" + + # Helper properties + + @property + def clock(self) -> BaseClock: + """A shorthand for the current clock.""" + return self.scheduler.env.clock + + @property + def defer_beats(self) -> float: + """The number of beats to defer function calls.""" + return float(self.scheduler.deferred) + + @property + def env(self) -> "FishBowl": + """A shorthand for the scheduler's fish bowl.""" + return self.scheduler.env + + @property + def 
time(self) -> Time: + """The fish bowl's current time.""" + return self.scheduler.env.time + + # State management + + def push(self, func: "MaybeCoroFunc", *args, **kwargs): + """Pushes a function state to the runner to be called in the next iteration. + + It is recommended to reload the runner after this in case the + current iteration sleeps past the deadline. + + Note that this does not take priority over the `snap` attribute; + if a snap is specified, the runner will continue to wait for that + deadline to pass. If running a new function immediately is desired, + the `snap` should be set to `None` before reloading the runner. + + Args: + func (MaybeCoroFunc): The function to add. + *args: The positional arguments being passed to `func`. + **kwargs: The keyword arguments being passed to `func`. + + Raises: + BadFunctionError: The value given for `func` must be callable. + """ + if not callable(func): + raise BadFunctionError(f"Expected a callable, got {func!r}") + elif not self.states: + state = FunctionState(func, args, kwargs) + return self.states.append(state) + + last_state = self.states[-1] + + # Transfer arguments from last state if possible + # (`_runner()` will discard excess arguments later) + args = args + last_state.args[len(args) :] + kwargs = last_state.kwargs | kwargs + self.states.append(FunctionState(func, args, kwargs)) + + def push_deferred( + self, deadline: Union[float, int], func: "MaybeCoroFunc", *args, **kwargs + ): + """Adds a function to a queue to eventually be run. + + It is recommended to reload the runner after this in case the + current iteration sleeps past the deadline. + + If there is an existing `snap` deadline, deferred states will take + priority and replace the `snap` attribute to ensure they run on time. + + Args: + deadline (Union[float, int]): + The absolute clock time to wait before the function state + is pushed. + func (MaybeCoroFunc): The function to add. + *args: The positional arguments being passed to `func`. 
+ **kwargs: The keyword arguments being passed to `func`. + + Raises: + BadFunctionError: The value given for `func` must be callable. + """ + if not callable(func): + raise BadFunctionError(f"Expected a callable, got {func!r}") + + if not self.deferred_states: + self._deferred_state_index = 0 + + index = self._deferred_state_index + self._deferred_state_index += 1 + + state = FunctionState(func, args, kwargs) + heapq.heappush(self.deferred_states, DeferredState(deadline, index, state)) + + def update_state(self, *args, **kwargs): + """Updates the top-most function state with new arguments. + + This assumes that the function is rescheduling itself, and + will therefore allow interval correction to occur in case + the period or tempo has changed. + """ + if not self.states: + return # reset_states() was likely called + last_state = self.states[-1] + last_state.args = args + last_state.kwargs = kwargs + self.allow_interval_correction() + + def reload(self): + """Triggers an immediate state reload. + + This method is useful when changes to the clock occur, + or when a new function is pushed to the runner. + """ + self._reload_event.set() + + # Lifecycle control + + def start(self): + """Initializes the background runner task. + + If the task has already started, this is a no-op. + """ + if self.is_running(): + return + + self._task = asyncio.create_task(self._runner()) + self._task.add_done_callback(asyncio.Task.result) + + def is_running(self) -> bool: + """Returns True if the runner is running.""" + return self._task is not None and not self._task.done() + + def swim(self): + """Allows the runner to continue the next iteration. + + This method must be called continuously to keep the runner alive. + """ + self._swimming = True + + def stop(self): + """Stops the runner's execution after the current iteration. + + This method takes precedence when `swim()` is also called. 
+ """ + self._stop = True + self.reload() + + def reset_states(self): + """Clears all function states from the runner. + + This method can safely be called while the runner is running. + In such case, the runner will stop by itself on the next + iteration unless a new state is pushed after this method. + """ + self.states.clear() + self.deferred_states.clear() + + # Interval shifting + + def allow_interval_correction(self): + """Allows the interval to be corrected in the next iteration.""" + self._can_correct_interval = True + + def delay_interval(self, deadline: Union[float, int], period: Union[float, int]): + """Delays the next iteration until the given deadline has passed. + + This is equivalent to setting the runner's `snap` attribute + to the deadline and also applying an appropriate interval + shift to synchronize the period. + + The runner must be started from a scheduler before this method can + be used. In addition, at least one function state must be pushed to + the runner (deferred or not) in order to calculate the interval shift. + + To take effect immediately, the runner should be reloaded + to skip the current iteration. + + Args: + time (Union[float, int]): The absolute time to wait. + period (Union[float, int]): The period to synchronize to. + + Raises: + RuntimeError: A function must be pushed before this can be used. + """ + self.snap = deadline + # Unlike _correct_interval(), we don't need to worry about delta + # here. _get_corrected_interval() ignores the interval until the + # snap time has passed, at which point any sleep drift will be + # accounted by `get_beat_time()` as per normal operation. + with self.time.scoped_shift(deadline - self.clock.time): + self.interval_shift = self.clock.get_beat_time(period) + + def _correct_interval(self, period: Union[float, int]): + """Checks if the interval should be corrected. 
+ + Interval correction occurs when `allow_interval_correction()` + is called, and the given interval is different from the last + interval *only* for the current iteration. If the interval + did not change, interval correction must be requested again. + + Args: + period (Union[float, int]): + The period being used in the current iteration. + """ + interval = period * self.clock.beat_duration + if self._can_correct_interval and interval != self._last_interval: + with self.time.scoped_shift(-self._delta): + self.interval_shift = self.clock.get_beat_time(period) + + self._last_interval = interval + self._can_correct_interval = False + + def _get_corrected_interval(self, period: Union[float, int]) -> float: + """Returns the amount of time until the next interval. + + The base interval is determined by the `period` argument, + and then offsetted by the `interval_shift` attribute. + + If the `snap` attribute is set to an absolute time + and the current clock time has not passed the snap, + it will take priority over whatever period was passed. + + Args: + period (Union[float, int]): + The number of beats in the interval. + + Returns: + float: The amount of time until the next interval is reached. + """ + snap_duration = self._get_snap_duration() + if snap_duration is not None: + return snap_duration + + with self.time.scoped_shift(self.interval_shift - self._delta): + # If the interval was corrected, this should equal to: + # `period * beat_duration` + expected_duration = self.clock.get_beat_time(period) + self._expected_time = self.clock.shifted_time + expected_duration + return expected_duration - self._delta + + def _get_snap_duration(self) -> Optional[float]: + """Returns the amount of time to wait for the snap, if any. + + If the `snap` attribute is None, this returns None. 
+ """ + if self.snap is None: + return None + return max(0.0, self.snap - self.clock.time) + + # Runner loop + + async def _runner(self): + try: + self._prepare() + except Exception as exc: + self._revert_state() + raise exc + + print_panel(f"[yellow][[red]{self.name}[/red] is swimming][/yellow]") + + try: + while self._is_ready_for_iteration(): + try: + await self._run_once() + except Exception as exc: + print(f"[red][Function exception | ({self.name})]") + traceback.print_exception(type(exc), exc, exc.__traceback__) + + self._revert_state() + self.swim() + finally: + print_panel(f"[yellow][Stopped [red]{self.name}[/red]][/yellow]") + + def _prepare(self): + self._last_state = self._get_state() + self._swimming = True + self._stop = False + self._delta = 0.0 + + period = self._get_period(self._last_state) + self._get_corrected_interval(period) # sets `_expected_time` + self._last_interval = period * self.clock.beat_duration + + async def _run_once(self): + self._swimming = False + self._reload_event.clear() + + state = self._get_state() + + if state is not None: + self._maybe_print_new_state(state) + self._last_state = state + signature = inspect.signature(state.func) + + _assert_function_signature(signature, state.args, state.kwargs) + args = state.args + # Prevent any TypeErrors when the user reduces the signature + kwargs = _discard_kwargs(signature, state.kwargs) + period = _extract_new_period(signature, state.kwargs) + + self._correct_interval(period) + duration = self._get_corrected_interval(period) + self._expected_time = self.clock.time + duration + + # Push any deferred states that have or will arrive onto the stack + arriving_states: list[DeferredState] = [] + while self.deferred_states: + entry = self.deferred_states[0] + if ( + self.clock.time >= entry.deadline + or state is not None + and self._expected_time >= entry.deadline + ): + heapq.heappop(self.deferred_states) + arriving_states.append(entry) + else: + break + + if arriving_states: + 
latest_entry = arriving_states[-1] + self.states.extend(e.state for e in arriving_states) + # In case the new state has a faster interval than before, + # delay it so it doesn't run too early + self.delay_interval( + latest_entry.deadline, + self._get_period(latest_entry.state), + ) + self.swim() + return + elif state is None: + # Nothing to do until the next deferred state arrives + deadline = self.deferred_states[0].deadline + duration = deadline - self.clock.time + interrupted = await self._sleep(duration) + self.swim() + return + + # NOTE: duration will always be defined at this point + interrupted = await self._sleep(duration) + if interrupted: + self.swim() + return + + try: + # Use copied context in function by creating it as a task + await asyncio.create_task( + self._call_func(state.func, args, kwargs), + name=f"asyncrunner-func-{self.name}", + ) + finally: + self._delta = self.clock.time - self._expected_time + self._check_snap() + + async def _call_func(self, func, args, kwargs): + """Calls the given function and optionally applies time shift + according to the `defer_beats` attribute. 
+ """ + shift = self.defer_beats * self.clock.beat_duration - self._sleep_drift + self.time.shift += shift + + return await _maybe_coro(func, *args, **kwargs) + + def _check_snap(self) -> None: + if self.snap is not None and self.clock.time > self.snap: + self.snap = None + + @staticmethod + def _get_period(state: Optional[FunctionState]) -> Union[float, int]: + if state is None: + return 0.0 + + return _extract_new_period(inspect.signature(state.func), state.kwargs) + + def _get_state(self) -> Optional[FunctionState]: + return self.states[-1] if self.states else None + + def _is_ready_for_iteration(self) -> bool: + return ( + (self.states or self.deferred_states) + and self._swimming # self.swim() + and not self._stop # self.stop() + ) + + def _maybe_print_new_state(self, state: FunctionState): + if self._last_state is not None and state is not self._last_state: + if not self._has_reverted: + print_panel(f"[yellow][Updating [red]{self.name}[/red]]") + else: + print_panel(f"[yellow][Saving [red]{self.name}[/red] from crash]") + self._has_reverted = False + + async def _sleep(self, duration: Union[float, int]) -> bool: + """Sleeps for the given duration or until the runner is reloaded. + + Args: + duration (Union[float, int]): The amount of time to sleep. + + Returns: + bool: True if the runner was reloaded, False otherwise. 
+ """ + if duration <= 0: + return self._reload_event.is_set() + + wait_task = asyncio.create_task(self.env.sleep(duration)) + reload_task = asyncio.create_task(self._reload_event.wait()) + done, pending = await asyncio.wait( + (wait_task, reload_task), + return_when=asyncio.FIRST_COMPLETED, + ) + + self._sleep_drift = self.clock.time - self._expected_time + + for task in pending: + task.cancel() + for task in done: + task.result() + + return reload_task in done + + def _revert_state(self): + if self.states: + self.states.pop() + self._has_reverted = True diff --git a/sardine/scheduler/constants.py b/sardine/scheduler/constants.py new file mode 100644 index 00000000..8403c10a --- /dev/null +++ b/sardine/scheduler/constants.py @@ -0,0 +1,6 @@ +from typing import Awaitable, Callable, TypeVar, Union + +__all__ = ("MaybeCoroFunc", "T") + +T = TypeVar("T") +MaybeCoroFunc = Callable[..., Union[T, Awaitable[T]]] diff --git a/sardine/scheduler/errors.py b/sardine/scheduler/errors.py new file mode 100644 index 00000000..54967337 --- /dev/null +++ b/sardine/scheduler/errors.py @@ -0,0 +1,14 @@ +class SchedulerError(Exception): + """An error associated with the scheduler and its async runners.""" + + +class BadFunctionError(SchedulerError): + """A function pushed to the runner was unacceptable for execution.""" + + +class BadArgumentError(BadFunctionError): + """The arguments being given to the function were not acceptable.""" + + +class BadPeriodError(BadFunctionError): + """The period for a given function was not valid.""" diff --git a/sardine/scheduler/scheduler.py b/sardine/scheduler/scheduler.py new file mode 100644 index 00000000..a9ed8011 --- /dev/null +++ b/sardine/scheduler/scheduler.py @@ -0,0 +1,123 @@ +from typing import Optional + +from ..base import BaseHandler +from ..utils import plural +from .async_runner import AsyncRunner + +__all__ = ("Scheduler",) + + +class Scheduler(BaseHandler): + def __init__( + self, + deferred_scheduling: bool = True, + ): + 
super().__init__() + self._runners: dict[str, AsyncRunner] = {} + self.deferred = deferred_scheduling + + def __repr__(self) -> str: + n_runners = len(self._runners) + return "<{} ({} {}) deferred={}>".format( + type(self).__name__, + n_runners, + plural(n_runners, "runner"), + self.deferred, + ) + + @property + def runners(self) -> list[AsyncRunner]: + """A list of the current runners stored in the scheduler.""" + return list(self._runners.values()) + + # Public methods + + def get_runner(self, name: str) -> Optional[AsyncRunner]: + """Retrieves the runner with the given name from the scheduler.""" + return self._runners.get(name) + + def start_runner(self, runner: AsyncRunner): + """Adds the runner to the scheduler and starts it. + + If the runner is already running on the same scheduler, + this will only update the scheduler's internal reference. + + Args: + runner (AsyncRunner): The runner to schedule and start. + + Raises: + ValueError: + The runner is either running on another scheduler or + has a name conflicting with a different runner instance. + """ + if ( + runner.is_running() + and runner.scheduler is not None + and runner.scheduler is not self + ): + raise ValueError(f"Runner {runner.name!r} is running on another scheduler") + + old = self.get_runner(runner.name) + if old is not None and old is not runner: + raise ValueError( + f"A different runner already exists with the name {runner.name!r}" + ) + + self._runners[runner.name] = runner + runner.scheduler = self + runner.start() + + def stop_runner(self, runner: AsyncRunner, *, reset_states: bool = True): + """Removes the runner from the scheduler and stops it. + + Args: + runner (AsyncRunner): The runner to remove. + reset_states (bool): + If True, `AsyncRunner.reset_states()` will be called. + + Raises: + ValueError: The runner is running on another scheduler. 
+ """ + if ( + runner.is_running() + and runner.scheduler is not None + and runner.scheduler is not self + ): + raise ValueError(f"Runner {runner.name!r} is running on another scheduler") + + # We don't set `runner.scheduler = None` because it might + # break the background task in the process + runner.stop() + runner.reload() + + if reset_states: + runner.reset_states() + + if self._runners.get(runner.name) is runner: + del self._runners[runner.name] + + def reset(self, *args, **kwargs): + """Stops and removes all runners from the scheduler. + + Args: + *args: Positional arguments to be passed to `stop_runner()`. + **kwargs: Keyword arguments to be passed to `stop_runner()`. + """ + for runner in self.runners: + self.stop_runner(runner, *args, **kwargs) + + # Internal methods + + def _reload_runners(self, *, interval_correction: bool): + for runner in self._runners.values(): + runner.reload() + + if interval_correction: + runner.allow_interval_correction() + + def setup(self): + self.register("stop") + + def hook(self, event: str, *args): + if event == "stop": + self.reset() diff --git a/sardine/sequences/QuickStep.py b/sardine/sequences/QuickStep.py deleted file mode 100644 index 603b4cc4..00000000 --- a/sardine/sequences/QuickStep.py +++ /dev/null @@ -1,212 +0,0 @@ -# QuickStep mode ############################################################### -# -# Quickstep is another dance similar to FoxTrot. All in all, this is as bad pun -# to name this feature: an emulation of FoxDot (https://foxdot.org/) patterning -# system. It works in a rather similar way, at least for the public interface. -# The rest is just carefully attributing senders to a _global_runner function -# that behaves just like any other swimming function. 
-# -# It can be useful to quickly lay down some drumming materials while using swim- -# ming functions for more delicate operations :) -################################################################################ -from string import ascii_uppercase, ascii_lowercase -from typing import Union, TYPE_CHECKING -from rich import print -from rich.panel import Panel - - -if TYPE_CHECKING: - from ..io.MIDISender import MIDISender - from ..io.SuperDirtSender import SuperDirtSender - from ..io.OSCSender import OSCSender - -__all__ = ("Player", "PatternHolder") - - -class Player: - - """ - A Player is a lone Sender that will be activated by a central QuickStep - swimming function. It contains the sender and basic information about - div, rate, etc... - """ - - def __init__( - self, - clock, - name: str, - content: Union[None, dict] = {}, - rate: Union[int, float] = 1, - ): - self._clock = clock - self._name = name - self._content = content - self._rate = rate - self._dur = 1 - self._div = int( - self._conversion_function(low=1, high=self._clock.ppqn * 8, value=self._dur) - ) - - def _conversion_function( - self, low: Union[int, float], high: Union[int, float], value: Union[int, float] - ) -> int: - """Internal function performing the conversion""" - - def remap(x, in_min, in_max, out_min, out_max): - return (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min - - return remap(value, 0, 4, 1, 127) - - @classmethod - def play(cls, *args, **kwargs): - """ - The play method will call a SuperDirtSender - """ - return {"type": "sound", "args": args, "kwargs": kwargs} - - @classmethod - def play_midi(cls, *args, **kwargs): - """ - The play MIDI method will call a MIDISender - """ - return {"type": "MIDI", "args": args, "kwargs": kwargs} - - @classmethod - def play_osc(cls, *args, **kwargs): - """ - The play_osc method will call an OSCSender - """ - return {"type": "OSC", "args": args, "kwargs": kwargs} - - def __rshift__(self, method_result): - """ - Public method for 
Players - """ - print(Panel.fit(f"[yellow][[red]{self._name}[/red] update!][/yellow]")) - self._content = method_result - - @property - def rate(self) -> Union[int, float]: - return self._rate - - @rate.setter - def rate(self, value: Union[int, float]) -> None: - self._rate = value - - @property - def dur(self): - return self._dur - - @dur.setter - def dur(self, value: int) -> None: - """ - Div for surfboards does not have the same behavior it has in regular swimming - functions. It would be counter-intuitive. In that mode, the div should be in- - terpreted as a speed, with speed=0.01 being the absolute lowest speed a surf- - board can go. It means that the lowest value is some arbitrary cap we choose - to follow such as self._clock.ppqn * 4 for instance. - - The high limit should feel like we are going insanely fast but still yield to - something like div=1 internally. - - Args: - value (int): the new 'speed' factor - """ - slow_limit = self._clock.ppqn * 8 - fast_limit = 1 - - new_div = int(self._conversion_function(slow_limit, fast_limit, value)) - # Dumb corrections - new_div = 1 if new_div == 0 else new_div - new_div = slow_limit if new_div > slow_limit else new_div - - self._dur = value - self._div = new_div - - def __repr__(self) -> str: - return f"[Player {self._name}]: {self._content}, div: {self._div}, rate: {self._rate}" - - -class PatternHolder: - - """ - A Pattern Holder, at core, is simply a dict. This dict will contaminate the - global namespace with references to players, just like FoxDot. Dozens of re- - ferences to Players() will be inserted in the global namespace to be used by - musicians. - - Players are senders in disguise. This tiny object will hold all the required - information to play a sender, including its rate, div, etc... 
- """ - - def __init__( - self, - MIDISender: "MIDISender", - OSCSender: "OSCSender", - SuperDirtSender: "SuperDirtSender", - clock, - ): - self._midisender = MIDISender - self._oscsender = OSCSender - self._superdirtsender = SuperDirtSender - self._clock = clock - self._speed = 1 - self._patterns = {} - self._init_internal_dictionary() - - def __repr__(self) -> str: - return f"Surfboard || speed: {self._speed}" - - @property - def speed(self): - return self._speed - - @speed.setter - def speed(self, value): - self._speed = float(value) - - def reset(self): - """ - Reset the internal dictionary of player/senders. - """ - for key in self._patterns.keys(): - self._patterns[key]._content = {} - - def _init_internal_dictionary(self): - """ - Initialisation process. Create the dictionary keys, add one player per - key. We can't push the dictionary to globals now. It needs to be done - during the __init__ process like so: - - for (k, v) in self._patterns.items(): - globals()[k] = v - """ - names = ["P" + l for l in ascii_uppercase + ascii_lowercase] - self._patterns = {k: Player(clock=self._clock, name=k) for k in names} - - def _global_runner(self, d=1, i=0): - """ - This is a template for a global swimming function that can hold all - the player/senders together for scheduling. 
- """ - d = self._speed / (self._clock.ppqn / 2) - patterns = [p for p in self._patterns.values() if p._content not in [None, {}]] - for player in patterns: - try: - if player._content["type"] == "MIDI": - self._midisender( - note=player._content['args'][0], - **player._content["kwargs"]).out( - i=i, div=player._div, rate=player._rate - ) - elif player._content["type"] == "OSC": - self._oscsender( - *player._content["args"], **player._content["kwargs"] - ).out(i=i, div=player._div, rate=player._rate) - elif player._content["type"] == "sound": - self._superdirtsender( - *player._content["args"], **player._content["kwargs"] - ).out(i=i, div=player._div, rate=player._rate) - except Exception as e: - continue - self._clock.schedule_func(self._global_runner, d=d, i=i + 1) diff --git a/sardine/sequences/__init__.py b/sardine/sequences/__init__.py index 9b2b2b53..5a5eec62 100644 --- a/sardine/sequences/__init__.py +++ b/sardine/sequences/__init__.py @@ -1,4 +1,5 @@ -from .Sequence import * -from .Chance import * -from .LexerParser.ListParser import * -from .QuickStep import * +from .chance import * +from .iterators import * +from .sardine_parser import * +from .sequence import * +from .variables import * diff --git a/sardine/sequences/Chance.py b/sardine/sequences/chance.py similarity index 100% rename from sardine/sequences/Chance.py rename to sardine/sequences/chance.py index 34a144a8..e95a9472 100644 --- a/sardine/sequences/Chance.py +++ b/sardine/sequences/chance.py @@ -1,5 +1,5 @@ -from random import randint, random from random import choices as randomChoices +from random import randint, random # ==============================================================================# diff --git a/sardine/sequences/Iterators.py b/sardine/sequences/iterators.py similarity index 98% rename from sardine/sequences/Iterators.py rename to sardine/sequences/iterators.py index 3ea5a74a..def6f924 100644 --- a/sardine/sequences/Iterators.py +++ b/sardine/sequences/iterators.py @@ -1,7 
+1,9 @@ -from string import ascii_letters from itertools import count +from string import ascii_letters from typing import Union +__all__ = ("Iterator",) + class Iterator(object): def __init__(self): diff --git a/sardine/sequences/sardine_parser/__init__.py b/sardine/sequences/sardine_parser/__init__.py new file mode 100644 index 00000000..2b16f03b --- /dev/null +++ b/sardine/sequences/sardine_parser/__init__.py @@ -0,0 +1,2 @@ +from .chord import * +from .list_parser import * diff --git a/sardine/sequences/LexerParser/Chords.py b/sardine/sequences/sardine_parser/chord.py similarity index 100% rename from sardine/sequences/LexerParser/Chords.py rename to sardine/sequences/sardine_parser/chord.py diff --git a/sardine/sequences/LexerParser/FuncLibrary.py b/sardine/sequences/sardine_parser/funclib.py similarity index 98% rename from sardine/sequences/LexerParser/FuncLibrary.py rename to sardine/sequences/sardine_parser/funclib.py index afaa3651..7709c8db 100644 --- a/sardine/sequences/LexerParser/FuncLibrary.py +++ b/sardine/sequences/sardine_parser/funclib.py @@ -1,15 +1,17 @@ import random import statistics from collections.abc import Iterable -from .Utilities import zip_cycle, map_unary_function, map_binary_function -from .Chords import Chord -from itertools import cycle, islice, chain +from functools import partial +from itertools import chain, cycle, islice from math import cos, sin, tan -from typing import Union, Callable, Optional from random import shuffle -from functools import partial -from ...sequences.Sequence import euclid -from easing_functions import BounceEaseIn, BounceEaseOut, BounceEaseInOut +from typing import Callable, Optional, Union + +from easing_functions import BounceEaseIn, BounceEaseInOut, BounceEaseOut + +from ..sequence import euclid +from .chord import Chord +from .utils import map_binary_function, map_unary_function, zip_cycle qualifiers = { "dim": [0, 3, 6, 12], diff --git a/sardine/sequences/LexerParser/ListParser.py 
b/sardine/sequences/sardine_parser/list_parser.py similarity index 86% rename from sardine/sequences/LexerParser/ListParser.py rename to sardine/sequences/sardine_parser/list_parser.py index db47a10b..d570f6fe 100644 --- a/sardine/sequences/LexerParser/ListParser.py +++ b/sardine/sequences/sardine_parser/list_parser.py @@ -1,9 +1,12 @@ -from lark import Lark, Tree from pathlib import Path -from .TreeCalc import CalculateTree -from .Chords import Chord + +from lark import Lark, Tree from rich import print +from ...base import BaseParser +from .chord import Chord +from .tree_calc import CalculateTree + # __all__ = ("ListParser", "Pnote", "Pname", "Pnum") __all__ = ("ListParser", "Pat") @@ -18,16 +21,13 @@ class ParserError(Exception): # build an abstract syntax tree and get the combination rules for each token. grammar_path = Path(__file__).parent -grammar = grammar_path / "grammars/proto.lark" +grammar = grammar_path / "sardine.lark" -class ListParser: +class ListParser(BaseParser): def __init__( self, - clock, - iterators, - variables, - parser_type: str = "proto", + parser_type: str = "sardine", debug: bool = False, ): """ListParser is the main interface for the pattern syntax. It can be @@ -41,14 +41,16 @@ def __init__( Args: parser_type (str, optional): Type of parser. Defaults to "number". 
""" - # Reference to clock for the "t" grammar token - self.clock = clock + super().__init__() self.debug = debug - self.iterators = iterators - self.variables = variables + self.parser_type = parser_type + + def __repr__(self) -> str: + return f"<{type(self).__name__} debug={self.debug} type={self.parser_type!r}>" + def setup(self): parsers = { - "proto": { + "sardine": { "raw": Lark.open( grammar, rel_to=__file__, @@ -65,17 +67,17 @@ def __init__( cache=True, lexer="contextual", transformer=CalculateTree( - self.clock, self.iterators, self.variables + self.env.clock, self.env.iterators, self.env.variables ), ), }, } try: - self._result_parser = parsers[parser_type]["full"] - self._printing_parser = parsers[parser_type]["raw"] + self._result_parser = parsers[self.parser_type]["full"] + self._printing_parser = parsers[self.parser_type]["raw"] except KeyError: - ParserError(f"Invalid Parser grammar, {parser_type} is not a grammar.") + ParserError(f"Invalid Parser grammar, {self.parser_type} is not a grammar.") def __flatten_result(self, pat): """Flatten a nested list, for usage after parsing a pattern. Will @@ -123,7 +125,7 @@ def print_tree_only(self, expression: str): """ print(Tree.pretty(self._printing_parser.parse(expression))) - def parse(self, pattern: str): + def parse(self, *args): """Main method to parse a pattern. Parses 'pattern' and returns a flattened list to index on to extract individual values. Note that this function is temporary. 
Support for stacked values is @@ -138,6 +140,7 @@ def parse(self, pattern: str): Returns: list: The parsed pattern as a list of values """ + pattern = args[0] final_pattern = [] try: final_pattern = self._result_parser.parse(pattern) @@ -175,6 +178,6 @@ def Pat(pattern: str, i: int = 0): Returns: int: The ith element from the resulting pattern """ - parser = ListParser(clock=c, parser_type="proto") + parser = ListParser(clock=c, parser_type="sardine") result = parser.parse(pattern) return result[i % len(result)] diff --git a/sardine/sequences/LexerParser/grammars/proto.lark b/sardine/sequences/sardine_parser/sardine.lark similarity index 100% rename from sardine/sequences/LexerParser/grammars/proto.lark rename to sardine/sequences/sardine_parser/sardine.lark diff --git a/sardine/sequences/LexerParser/TreeCalc.py b/sardine/sequences/sardine_parser/tree_calc.py similarity index 87% rename from sardine/sequences/LexerParser/TreeCalc.py rename to sardine/sequences/sardine_parser/tree_calc.py index f490d640..83dc9fd5 100644 --- a/sardine/sequences/LexerParser/TreeCalc.py +++ b/sardine/sequences/sardine_parser/tree_calc.py @@ -1,17 +1,18 @@ -from lark import Transformer, v_args -from typing import Union -from .Utilities import zip_cycle, map_unary_function, map_binary_function, CyclicalList -from .Chords import Chord -from . import FuncLibrary -from lark.lexer import Token -from typing import Any -from itertools import cycle, takewhile, count -from time import time import datetime import random +from itertools import count, cycle, takewhile +from time import time +from typing import Any, Union + +from lark import Transformer, v_args +from lark.lexer import Token from rich import print from rich.panel import Panel +from . 
import funclib +from .chord import Chord +from .utils import CyclicalList, map_binary_function, map_unary_function, zip_cycle + @v_args(inline=True) class CalculateTree(Transformer): @@ -158,11 +159,11 @@ def get_slice(self, content: list, list_slice: list) -> list: def make_chord(self, *args: list): """Turn a list into a chord""" - return FuncLibrary.chordify(*sum(args, start=[])) + return funclib.chordify(*sum(args, start=[])) def chord_reverse(self, notes: list, inversion: list) -> list: """Chord inversion upwards""" - return FuncLibrary.invert(notes, [int(inversion[0])]) + return funclib.invert(notes, [int(inversion[0])]) def note_octave_up(self, note): """Move a note one octave up""" @@ -196,7 +197,7 @@ def add_qualifier(self, note, *quali): quali = "".join([str(x) for x in quali]) try: return map_binary_function( - lambda x, y: x + y, note, FuncLibrary.qualifiers[str(quali)] + lambda x, y: x + y, note, funclib.qualifiers[str(quali)] ) except KeyError: return note @@ -225,7 +226,7 @@ def make_list(self, *args): def get_time(self): """Return current clock time (tick) as integer""" - return [int(self.clock.tick)] + return [self.clock.time] def get_year(self): """Return current clock time (tick) as integer""" @@ -257,11 +258,11 @@ def get_microsecond(self): def get_measure(self): """Return current measure (bar) as integer""" - return [int(self.clock.bar)] + return [self.clock.bar] def get_phase(self): """Return current phase (phase) as integer""" - return [int(self.clock.phase)] + return [self.clock.phase] def get_unix_time(self): """Return current unix time as integer""" @@ -417,42 +418,42 @@ def _simple_association(name, value): def function_call(self, func_name, *args): modifiers_list = { # Voice leading operations - "dmitri": FuncLibrary.dmitri, - "voice": FuncLibrary.find_voice_leading, - "sopr": FuncLibrary.soprano, - "quant": FuncLibrary.quantize, - "disco": FuncLibrary.disco, - "adisco": FuncLibrary.antidisco, - "bass": FuncLibrary.bassify, - "sopr": 
FuncLibrary.soprano, - "invert": FuncLibrary.invert, - "aspeed": FuncLibrary.anti_speed, + "dmitri": funclib.dmitri, + "voice": funclib.find_voice_leading, + "sopr": funclib.soprano, + "quant": funclib.quantize, + "disco": funclib.disco, + "adisco": funclib.antidisco, + "bass": funclib.bassify, + "sopr": funclib.soprano, + "invert": funclib.invert, + "aspeed": funclib.anti_speed, # Boolean mask operations - "euclid": FuncLibrary.euclidian_rhythm, - "mask": FuncLibrary.mask, - "vanish": FuncLibrary.remove_x, - "expand": FuncLibrary.expand, - "pal": FuncLibrary.palindrome, - "apal": FuncLibrary.alternative_palindrome, - "rev": FuncLibrary.reverse, - "leave": FuncLibrary.leave, - "inp": FuncLibrary.insert_pair, - "in": FuncLibrary.insert, - "inprot": FuncLibrary.insert_pair_rotate, - "inrot": FuncLibrary.insert_rotate, - "shuf": FuncLibrary.shuffle, + "euclid": funclib.euclidian_rhythm, + "mask": funclib.mask, + "vanish": funclib.remove_x, + "expand": funclib.expand, + "pal": funclib.palindrome, + "apal": funclib.alternative_palindrome, + "rev": funclib.reverse, + "leave": funclib.leave, + "inp": funclib.insert_pair, + "in": funclib.insert, + "inprot": funclib.insert_pair_rotate, + "inrot": funclib.insert_rotate, + "shuf": funclib.shuffle, # Math functions - "clamp": FuncLibrary.clamp, - "sin": FuncLibrary.sinus, - "cos": FuncLibrary.cosinus, - "tan": FuncLibrary.tangent, - "abs": FuncLibrary.absolute, - "max": FuncLibrary.maximum, - "min": FuncLibrary.minimum, - "mean": FuncLibrary.mean, - "scale": FuncLibrary.scale, - "filt": FuncLibrary.custom_filter, - "quant": FuncLibrary.quantize, + "clamp": funclib.clamp, + "sin": funclib.sinus, + "cos": funclib.cosinus, + "tan": funclib.tangent, + "abs": funclib.absolute, + "max": funclib.maximum, + "min": funclib.minimum, + "mean": funclib.mean, + "scale": funclib.scale, + "filt": funclib.custom_filter, + "quant": funclib.quantize, } try: return modifiers_list[func_name](*args) diff --git 
a/sardine/sequences/LexerParser/Utilities.py b/sardine/sequences/sardine_parser/utils.py similarity index 96% rename from sardine/sequences/LexerParser/Utilities.py rename to sardine/sequences/sardine_parser/utils.py index da7cbd03..f4227c6d 100644 --- a/sardine/sequences/LexerParser/Utilities.py +++ b/sardine/sequences/sardine_parser/utils.py @@ -1,6 +1,6 @@ -from itertools import islice, count, cycle -from itertools import cycle, takewhile, dropwhile -from .Chords import Chord +from itertools import count, cycle, dropwhile, islice, takewhile + +from .chord import Chord def floating_point_range(start, end, step): diff --git a/sardine/sequences/Sequence.py b/sardine/sequences/sequence.py similarity index 98% rename from sardine/sequences/Sequence.py rename to sardine/sequences/sequence.py index c3228301..2d6bb4ce 100644 --- a/sardine/sequences/Sequence.py +++ b/sardine/sequences/sequence.py @@ -1,6 +1,6 @@ -from typing import Union import itertools import random +from typing import Union def bin(sequence: Union[str, Union[int, float]], reverse: bool = False): @@ -100,4 +100,4 @@ def imod(mod: int, i: int) -> bool: def pick(*args) -> list: """Alternative function to use random.choice. 
More terse""" - return choice(list(args)) + return random.choice(list(args)) diff --git a/sardine/sequences/Variables.py b/sardine/sequences/variables.py similarity index 98% rename from sardine/sequences/Variables.py rename to sardine/sequences/variables.py index 4934e049..09bfaccf 100644 --- a/sardine/sequences/Variables.py +++ b/sardine/sequences/variables.py @@ -1,7 +1,9 @@ -from string import ascii_letters from itertools import count +from string import ascii_letters from typing import Union +__all__ = ("Variables",) + class Variables(object): def __init__(self): diff --git a/sardine/sequences/LexerParser/__init__.py b/sardine/sequences/ziffers_parser/__init__.py similarity index 100% rename from sardine/sequences/LexerParser/__init__.py rename to sardine/sequences/ziffers_parser/__init__.py diff --git a/sardine/sequences/ziffers_parser/ziffers.lark b/sardine/sequences/ziffers_parser/ziffers.lark new file mode 100644 index 00000000..e69de29b diff --git a/sardine/sequences/ziffers_parser/ziffers_parser.py b/sardine/sequences/ziffers_parser/ziffers_parser.py new file mode 100644 index 00000000..e69de29b diff --git a/sardine/sequences/ziffers_parser/ziffers_tree.py b/sardine/sequences/ziffers_parser/ziffers_tree.py new file mode 100644 index 00000000..e69de29b diff --git a/sardine/superdirt/__init__.py b/sardine/superdirt/__init__.py index aeefb15c..279ed325 100644 --- a/sardine/superdirt/__init__.py +++ b/sardine/superdirt/__init__.py @@ -1 +1 @@ -from .AutoBoot import * +from .process import * diff --git a/sardine/superdirt/AutoBoot.py b/sardine/superdirt/process.py similarity index 90% rename from sardine/superdirt/AutoBoot.py rename to sardine/superdirt/process.py index 6212ff63..0684c9ee 100644 --- a/sardine/superdirt/AutoBoot.py +++ b/sardine/superdirt/process.py @@ -1,31 +1,36 @@ #!/usr/bin/env python3 -from os import walk, path -from pathlib import Path -import platform, subprocess +import asyncio +import platform import shutil -from typing import Union 
-from appdirs import * +import subprocess import tempfile +from os import path, walk +from pathlib import Path +from typing import Optional, Union + import psutil -import re -import asyncio +from appdirs import * +from rich import print from rich.console import Console from rich.panel import Panel -from rich import print -__all__ = ("SuperColliderProcess",) +__all__ = ("SuperDirtProcess",) -class SuperColliderProcess: +class SuperDirtProcess: def __init__( - self, startup_file: Union[str, None] = None, preemptive=True, verbose=False + self, startup_file: Optional[str] = None, preemptive=True, verbose=False ): appname, appauthor = "Sardine", "Bubobubobubo" self._user_dir = Path(user_data_dir(appname, appauthor)) self._sclang_path = self.find_sclang_path() self._synth_directory = self._find_synths_directory() - self._startup_file = self._find_startup_file(user_file=startup_file) + self._startup_file = ( + self._find_startup_file(user_file=startup_file) + if startup_file is not None + else None + ) self.temp_file = tempfile.NamedTemporaryFile() self._verbose = verbose @@ -131,7 +136,7 @@ async def monitor(self): where = self.temp_file.tell() lines = self.temp_file.read() if not lines: - await asyncio.sleep(0.05) + await asyncio.sleep(0.1) self.temp_file.seek(where) else: if self._verbose: @@ -173,6 +178,10 @@ def send(self, message: str): """User friendly alias for write_stdin""" self._write_stdin(message) + def trace(self, value: bool = True) -> None: + """Tracing OSC messages sent to SuperCollider (only visible in verbose mode)""" + self._write_stdin(f"OSCFunc.trace({'true' if value else 'false'});") + def meter(self) -> None: """Open SuperCollider VUmeter""" self._write_stdin("s.meter()") @@ -181,6 +190,10 @@ def scope(self) -> None: """Open SuperCollider frequency scope""" self._write_stdin("s.scope()") + def freqscope(self) -> None: + """Open SuperCollider frequency scope""" + self._write_stdin("s.freqscope()") + def meterscope(self) -> None: """Open 
SuperCollider frequency scope + VUmeter""" self._write_stdin("s.scope(); s.meter()") @@ -188,7 +201,7 @@ def meterscope(self) -> None: def _check_synth_file_extension(self, string: str) -> bool: return string.endswith(".scd") or string.endswith(".sc") - def startup_file_path(self) -> str: + def startup_file_path(self) -> str | None: return self._startup_file def load_custom_synthdefs(self) -> None: @@ -253,7 +266,8 @@ def boot(self) -> None: universal_newlines=True, start_new_session=True, ) - self._write_stdin(message="""load("{}")""".format(self._startup_file)) + if self._startup_file is not None: + self._write_stdin(message="""load("{}")""".format(self._startup_file)) if self._synth_directory is not None: self.load_custom_synthdefs() diff --git a/sardine/utils/Messages.py b/sardine/utils/Messages.py new file mode 100644 index 00000000..0ebb8059 --- /dev/null +++ b/sardine/utils/Messages.py @@ -0,0 +1,32 @@ +from rich.panel import Panel + +sardine_intro = Panel.fit( + """[red] +░██████╗░█████╗░██████╗░██████╗░██╗███╗░░██╗███████╗ +██╔════╝██╔══██╗██╔══██╗██╔══██╗██║████╗░██║██╔════╝ +╚█████╗░███████║██████╔╝██║░░██║██║██╔██╗██║█████╗░░ +░╚═══██╗██╔══██║██╔══██╗██║░░██║██║██║╚████║██╔══╝░░ +██████╔╝██║░░██║██║░░██║██████╔╝██║██║░╚███║███████╗ +╚═════╝░╚═╝░░╚═╝╚═╝░░╚═╝╚═════╝░╚═╝╚═╝░░╚══╝╚══════╝ + +Sardine is a MIDI/OSC sequencer made for live-coding +Play music, read the docs, contribute, and have fun! 
+WEBSITE: [yellow]https://sardine.raphaelforment.fr[/yellow]
+GITHUB: [yellow]https://github.com/Bubobubobubobubo/sardine[/yellow]
+[/red]"""
+)
+
+
+def _ticked(condition: bool):
+    """Return an ASCII art [X] if True or [ ] if false"""
+    return "[X]" if condition else "[ ]"
+
+
+def config_line_printer(config: dict):
+    return (
+        f"[yellow]BPM: [red]{config.bpm}[/red],"
+        + f"[yellow]BEATS: [red]{config.beats}[/red] "
+        + f"[yellow]SC: [red]{_ticked(config.superdirt_handler)}[/red], "
+        + f"[yellow]DEFER: [red]{_ticked(config.deferred_scheduling)}[/red] "
+        + f"[yellow]MIDI: [red]{config.midi}[/red]"
+    )
diff --git a/sardine/utils/__init__.py b/sardine/utils/__init__.py
new file mode 100644
index 00000000..35fd460f
--- /dev/null
+++ b/sardine/utils/__init__.py
@@ -0,0 +1,45 @@
+import functools
+from typing import TYPE_CHECKING, Callable, ParamSpec, TypeVar, Union
+
+from .Messages import *
+
+if TYPE_CHECKING:
+    from ..base import BaseClock
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+MISSING = object()
+
+
+def alias_param(name: str, alias: str):
+    """
+    Alias a keyword parameter in a function. Throws a TypeError when a value is
+    given for both the original kwarg and the alias. Method taken from
+    github.com/thegamecracks/abattlemetrics/blob/main/abattlemetrics/client.py
+    (@thegamecracks).
+ """ + + def deco(func: Callable[P, T]): + @functools.wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + alias_value = kwargs.pop(alias, MISSING) + if alias_value is not MISSING: + if name in kwargs: + raise TypeError(f"Cannot pass both {name!r} and {alias!r} in call") + kwargs[name] = alias_value + return func(*args, **kwargs) + + return wrapper + + return deco + + +def get_snap_deadline(clock: "BaseClock", offset_beats: Union[float, int]): + next_bar = clock.get_bar_time(1) + offset = clock.get_beat_time(offset_beats, sync=False) + return clock.time + next_bar + offset + + +def plural(n: int, word: str, suffix: str = "s"): + return word if n == 1 else word + suffix diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..6698f37b --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,3 @@ +import sardine + +sardine.event_loop.install_policy() diff --git a/tests/fish_bowl/__init__.py b/tests/fish_bowl/__init__.py new file mode 100644 index 00000000..916b1698 --- /dev/null +++ b/tests/fish_bowl/__init__.py @@ -0,0 +1,134 @@ +import itertools +import math +import time +from typing import ( + Any, + Awaitable, + Callable, + Collection, + Iterator, + NamedTuple, + Optional, + Sequence, + TypeVar, + Union, +) + +import pytest_asyncio +import rich +from rich.table import Column, Table + +from sardine import BaseHandler, FishBowl + +__all__ = ("EventLogEntry", "EventLogHandler", "fish_bowl") + +T = TypeVar("T") + + +class EventLogEntry(NamedTuple): + """An event entry for the `EventLoggingHandler`.""" + + timestamp: float + clock_time: float + event: str + args: tuple[Any, ...] 
# --- tests/fish_bowl/__init__.py (continued) ---


class EventLogHandler(BaseHandler):
    """Logs events with timestamps, and optionally according to a whitelist."""

    def __init__(self, *, whitelist: Optional[Collection[str]] = None):
        super().__init__()
        self.whitelist = whitelist
        self.events: list[EventLogEntry] = []

    # Analysis methods

    def filter(self, events: Union[str, Collection[str]]) -> Iterator[EventLogEntry]:
        """Yield the logged entries whose event name is in *events*.

        A bare string is treated as a one-element collection.
        """
        if isinstance(events, str):
            events = (events,)

        for entry in self.events:
            if entry.event in events:
                yield entry

    def time(self) -> float:
        """Monotonic clock used for the ``timestamp`` field of each entry."""
        return time.perf_counter()

    # Handler methods

    def setup(self):
        # Register only the whitelisted events, or globally (None) when no
        # whitelist was given.
        if self.whitelist is not None:
            for event in self.whitelist:
                self.register(event)
        else:
            self.register(None)

    def hook(self, event: str, *args):
        # Record both wall time and the fish bowl's clock time so tests can
        # compare the two.
        self.events.append(
            EventLogEntry(
                timestamp=self.time(),
                clock_time=self.env.clock.time,
                event=event,
                args=args,
            )
        )


def _get_last(seq: Sequence[T], default: T) -> T:
    """Return the last element of *seq*, or *default* when *seq* is empty."""
    return seq[-1] if seq else default


class Pauser:
    """Measures real versus expected durations for a sequence of sleeps."""

    def __init__(
        self,
        time_func: Callable[[], float],
        sleep_func: Callable[[float], Awaitable[Any]],
    ):
        self.time = time_func
        self._sleep = sleep_func

        self.real: list[float] = []
        self.expected: list[float] = []

    @property
    def cumulative_real(self) -> list[float]:
        """Running totals of the measured sleep durations."""
        return list(itertools.accumulate(self.real))

    @property
    def cumulative_expected(self) -> list[float]:
        """Running totals of the requested sleep durations."""
        return list(itertools.accumulate(self.expected))

    def assert_equality(self, *, tolerance: float):
        """Assert each real duration is within *tolerance* of its expectation."""
        self.print_table(tolerance)
        for real, expected in zip(self.real, self.expected):
            assert math.isclose(real, expected, abs_tol=tolerance)

    def print_table(self, tolerance: Optional[float] = None):
        """Render a rich table of expected times and deviations."""
        table = Table(
            Column("Expected", footer="Tolerance"),
            Column("Deviation", footer=f"<{tolerance}"),
            show_footer=tolerance is not None,
        )
        rows = zip(self.cumulative_expected, self.real, self.expected)
        for cumulative, real, expected in rows:
            table.add_row(str(cumulative), str(real - expected))

        rich.print(table)

    async def sleep(self, duration: float, *, accumulate=True) -> float:
        """Sleep for *duration*, record the timing, and return the elapsed time.

        When *accumulate* is False the slot is recorded as a 0.0/0.0 pair so
        the cumulative series keeps one entry per call without counting the
        pause itself.
        """
        start = self.time()
        await self._sleep(duration)
        elapsed = self.time() - start

        if accumulate:
            self.real.append(elapsed)
            self.expected.append(duration)
        else:
            self.real.append(0.0)
            self.expected.append(0.0)

        # BUG FIX: the method is annotated ``-> float`` and computes
        # ``elapsed`` but previously never returned it (callers got None).
        return elapsed


@pytest_asyncio.fixture
def fish_bowl() -> FishBowl:
    """Fresh FishBowl instance for each test."""
    return FishBowl()


# --- tests/fish_bowl/test_clock.py ---
import asyncio
import math
from typing import Type

import pytest
import rich
from rich.table import Table

from sardine import BaseClock, FishBowl, InternalClock, LinkClock

from . import EventLogHandler, Pauser, fish_bowl


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "clock_type,real_tol,expected_tol",
    [
        (InternalClock, 0.00025, 0.024),
        (LinkClock, 0.032, 0.034),
    ],
)
async def test_clock_sleeping(
    clock_type: Type[BaseClock],
    real_tol: float,
    expected_tol: float,
):
    """Drive the transport through its lifecycle and check clock accuracy."""
    PAUSE_DURATION = 0.1
    ALWAYS_FAIL = False

    fish_bowl = FishBowl(clock=clock_type())

    end_event = "test_finish"
    event_order = ("start", "pause", "resume", "stop", end_event)

    logger = EventLogHandler(whitelist=event_order)
    fish_bowl.add_handler(logger)

    pauser = Pauser(logger.time, asyncio.sleep)

    # Pauses taken while the clock is stopped/paused do not accumulate.
    await pauser.sleep(PAUSE_DURATION, accumulate=False)
    fish_bowl.start()

    await pauser.sleep(PAUSE_DURATION)
    fish_bowl.pause()

    await pauser.sleep(PAUSE_DURATION, accumulate=False)
    fish_bowl.resume()

    await pauser.sleep(PAUSE_DURATION)
    fish_bowl.stop()

    await pauser.sleep(PAUSE_DURATION, accumulate=False)
    fish_bowl.dispatch(end_event)

    # One log entry per dispatched transport event.
    assert len(logger.events) == 5

    table = Table("Clock", "Performance Deviation", "Expected Deviation")
    rows = zip(logger.events, pauser.cumulative_real, pauser.cumulative_expected)
    for event, real, expected in rows:
        clock = event.clock_time
        e_dev = clock - expected
        r_dev = clock - real
        table.add_row(str(clock), str(r_dev), str(e_dev))
    table.add_section()
    table.add_row("Tolerance", f"<{real_tol}", f"<{expected_tol}")
    rich.print(table)

    rows = zip(logger.events, pauser.cumulative_real, pauser.cumulative_expected)
    for event, rt, et in rows:
        assert math.isclose(event.clock_time, et, abs_tol=expected_tol)
        assert math.isclose(event.clock_time, rt, abs_tol=real_tol)

    assert not ALWAYS_FAIL, "ALWAYS_FAIL is enabled"


# --- tests/fish_bowl/test_hooks.py ---
from typing import Any, Optional

import pytest

from sardine import BaseHandler, FishBowl

from . import fish_bowl


class DummyHandler(BaseHandler):
    # "foo" appears twice on purpose: registering the same event a second
    # time must not produce duplicate hook calls.
    EVENTS = ("foo", "bar", "foo")

    def __init__(self):
        super().__init__()
        self.has_setup = False
        self.has_teared_down = False
        self.last_event: Optional[tuple[str, tuple[Any, ...]]] = None
        self.event_count = 0

    def setup(self):
        self.has_setup = True
        for event in self.EVENTS:
            self.register(event)

    def teardown(self):
        self.has_teared_down = True

    def reset_event_count(self):
        self.event_count = 0

    def hook(self, event: str, *args):
        self.last_event = (event, args)
        self.event_count += 1


@pytest.fixture
def dummy_handler() -> DummyHandler:
    return DummyHandler()


def test_handler(fish_bowl: FishBowl, dummy_handler: DummyHandler):
    """Exercise handler installation, hooks, global hooks, and removal."""
    temp_event = "baz"

    # Ensure test hooks aren't in use
    for event in dummy_handler.EVENTS + (temp_event,):
        assert fish_bowl._event_hooks.get(event) is None

    # Add handler and check for setup call
    fish_bowl.add_handler(dummy_handler)
    assert dummy_handler.has_setup

    assert dummy_handler.env is fish_bowl

    # Verify installation of hooks
    for event in dummy_handler.EVENTS:
        assert dummy_handler in fish_bowl._event_hooks[event]

    # NOTE: we are expecting insertion order
    for event, expected_event in zip(
        dummy_handler.EVENTS, fish_bowl._hook_events[dummy_handler]
    ):
        assert event == expected_event

    # Test each hook
    for i, event in enumerate(dummy_handler.EVENTS):
        fish_bowl.dispatch(event, i)
        assert dummy_handler.last_event == (event, (i,))

    # Test global hook
    dummy_handler.reset_event_count()
    dummy_handler.register(None)

    fish_bowl.dispatch(temp_event)
    assert dummy_handler.last_event == (temp_event, ())

    # Make sure hooks aren't called twice with existing events
    existing_event = dummy_handler.EVENTS[0]
    fish_bowl.dispatch(existing_event)
    assert dummy_handler.last_event == (existing_event, ())
    assert dummy_handler.event_count == 2

    # Verify removal of hooks
    fish_bowl.remove_handler(dummy_handler)
    assert dummy_handler.has_teared_down

    for event in dummy_handler.EVENTS:
        assert fish_bowl._event_hooks.get(event) is None

    assert fish_bowl._hook_events.get(dummy_handler) is None


# --- tests/fish_bowl/test_sleep.py ---
import time

import pytest

from sardine import FishBowl, InternalClock

from . import Pauser, fish_bowl


@pytest.mark.asyncio
async def test_sleep_internal_clock(fish_bowl: FishBowl):
    """Repeated FishBowl.sleep calls should track wall time within tolerance."""
    PAUSE_DURATION = 0.02
    ITERATIONS = 10
    TOLERANCE = 0.016
    ALWAYS_FAIL = False

    assert isinstance(fish_bowl.clock, InternalClock)
    assert fish_bowl.clock.can_sleep()

    pauser = Pauser(time.perf_counter, fish_bowl.sleep)

    fish_bowl.start()
    for _ in range(ITERATIONS):
        await pauser.sleep(PAUSE_DURATION)
    fish_bowl.stop()

    pauser.assert_equality(tolerance=TOLERANCE)

    assert not ALWAYS_FAIL, "ALWAYS_FAIL is enabled"


# --- tests/fish_bowl/test_transports.py ---
import pytest

from sardine import FishBowl

from . import EventLogHandler, fish_bowl


@pytest.mark.asyncio
async def test_transports(fish_bowl: FishBowl):
    """Redundant transport calls must be no-ops and dispatch no events."""
    logger = EventLogHandler(
        whitelist=("start", "stop", "pause", "resume"),
    )
    fish_bowl.add_handler(logger)

    # No-ops
    fish_bowl.stop()
    fish_bowl.pause()
    fish_bowl.resume()

    # Regular
    fish_bowl.start()
    fish_bowl.pause()
    fish_bowl.stop()

    fish_bowl.start()
    fish_bowl.resume()  # no-op
    fish_bowl.pause()
    fish_bowl.stop()

    event_names = [e.event for e in logger.events]
    # fmt: off
    assert event_names == [
        "start", "pause", "stop",
        "start", "pause", "stop",
    ]
    # fmt: on


# --- tests/handlers/__init__.py --- (empty)


# --- tests/handlers/test_child.py ---
import pytest

from sardine import BaseHandler, FishBowl


def test_basic_children():
    """Children can be added/removed; self-parenting is rejected."""
    group = BaseHandler()
    a = BaseHandler()
    b = BaseHandler()

    group.add_child(a)
    group.add_child(b)
    assert group.children == [a, b]
    assert a.parent is group and b.parent is group

    # no-ops
    group.add_child(a)
    group.remove_child(group)

    with pytest.raises(ValueError):
        group.add_child(group)

    group.remove_child(a)
    group.remove_child(b)
    assert group.children == []
    assert a.parent is None and b.parent is None


def test_child_locks():
    """lock_children must propagate and forbid removing locked descendants."""
    bowl = FishBowl()

    root = BaseHandler(lock_children=True)
    a = BaseHandler()
    a_a = BaseHandler()
    b = BaseHandler(lock_children=False)
    b_b = BaseHandler()
    handlers = (root, a, a_a, b, b_b)

    a.add_child(a_a)
    b.add_child(b_b)
    root.add_child(a)
    root.add_child(b)

    bowl.add_handler(root)
    bowl_handlers = bowl.handlers
    assert all(h in bowl_handlers for h in handlers)

    with pytest.raises(ValueError):
        bowl.remove_handler(a)

    with pytest.raises(ValueError):
        bowl.remove_handler(a_a)

    with pytest.raises(ValueError):
        bowl.remove_handler(b)

    bowl.remove_handler(b_b)

    bowl.remove_handler(root)
    bowl_handlers = bowl.handlers
    assert all(h not in bowl_handlers for h in handlers)


# --- tests/handlers/test_osc.py ---
# TODO test_osc.py
- """ - parser = PARSER - patterns = [ - ".", - ".!4", - ".?", - ] - expected = [[None], [None] * 4, [None]] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - self.assertEqual(parser.parse(pattern), expected[i]) - - def test_choice_operator(self): - """ - Test the choice (|) operator - """ - parser = PARSER - patterns = ["1|2|3|4", "[1,2,3,4]|[., .]", "baba|dada", "(baba:2)|(dada:4)"] - expected = [ - [[1], [2], [3], [4]], - [[1, 2, 3, 4], [None, None]], - [["baba"], ["dada"]], - [["baba:2"], ["dada:4"]], - ] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - self.assertIn(parser.parse(pattern), expected[i]) - - def test_presence_operator(self): - """ - Test the presence operator (?) that can make things disappear - 50% of the time - """ - parser = PARSER - patterns = [ - "[1,2,3,4,5]?", - "1?,2?,3?,4?", - ] - expected_true = [[1, 2, 3, 4, 5], [1, 2, 3, 4]] - expected_false = [[None] * 5, [None] * 4] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - mocked_random_choice = lambda: 1.0 - with mock.patch("random.random", mocked_random_choice): - result = parser.parse(pattern) - self.assertEqual(expected_true[i], result) - mocked_random_choice = lambda: 0.0 - with mock.patch("random.random", mocked_random_choice): - result = parser.parse(pattern) - self.assertEqual(expected_false[i], result) - - def test_number_pattern(self): - """ - Test parsing several patterns composed of numbers and simple math operations. 
- """ - parser = PARSER - patterns = [ - ".5", - "0.5", - "1, 2, 3", - "1+1, 2*3, 4-1, 5/2", - ] - expected = [ - [0.5], - [0.5], - [1, 2, 3], - [2, 6, 3, 2.5], - ] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern) - self.assertEqual(len(result), len(expected[i])) - for x, y in zip(result, expected[i]): - self.assertAlmostEqual(x, y) - - def test_list_arithmetic(self): - """ - Test parsing several patterns composed of numbers and simple math operations. - """ - parser = PARSER - patterns = [ - "[1,2,3]+1, [1,2,3]*2", - "[1,2,3]/2, [1,2,3]//2", - "[2,3,4]-2, [2,3,4]%2", - "[1,2,3,4]+[1,2,3,4]", - "[1,2,3,4]*[1,2,3,4]", - "[1,2,3,4]/[1,2,3,4]", - "[1,2,3,4]/[2,3,4,5]", - "[2,4,6,8]%[12,8]", - ] - expected = [ - [2, 3, 4, 2, 4, 6], - [0.5, 1.0, 1.5, 0, 1, 1], - [0, 1, 2, 0, 1, 0], - [2, 4, 6, 8], - [1, 4, 9, 16], - [1.0, 1.0, 1.0, 1.0], - [0.5, 0.6666666666666666, 0.75, 0.8], - [2, 4, 6, 0], - ] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern) - self.assertEqual(len(result), len(expected[i])) - for x, y in zip(result, expected[i]): - self.assertAlmostEqual(x, y) - - def test_notes(self): - """ - Test parsing simple note composition - """ - parser = PARSER - patterns = [ - "C,D,E,F,G,A,B", - "Do,Re,Mi,Fa,Sol,La,Si", - "Do,Ré,Mi,Fa,Sol,La,Si", - "C0,C1,C2,C3,C4,C5,C6,C7,C8,C9", - "C, C#, Cb", - "C, Eb, G", - "C, C., C.., C...", - "C, C', C'', C'''", - "C@maj7, C@min7", - ] - expected = [ - [60, 62, 64, 65, 67, 69, 71], - [60, 62, 64, 65, 67, 69, 71], - [60, 62, 64, 65, 67, 69, 71], - [12, 24, 36, 48, 60, 72, 84, 96, 108, 120], - [60, 61, 59], - [60, 63, 67], - [60, 48, 36, 24], - [60, 72, 84, 96], - [60, 64, 67, 71, 60, 63, 67, 70], - ] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern) - self.assertEqual(len(result), len(expected[i])) - for x, y in zip(result, 
expected[i]): - self.assertEqual(x, y) - - def test_integer_ranges(self): - """ - Test parsing integer ranges - """ - - def in_range(test_range: list, y: Union[int, float]) -> bool: - return y in test_range - - parser = PARSER - patterns = [ - "0~1", - "0~10", - "100~200", - ] - - expected = [list(range(0, 2)), list(range(0, 11)), list(range(100, 201))] - - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern)[0] - self.assertTrue(in_range(expected[i], y=result)) - - def test_list_expansion(self): - """ - Test the ! and !! operators for expanding lists - """ - - parser = PARSER - patterns = [ - "[1,2]!2", - "[1,2]!!2", - "[1,.]!2", - "[1,.]!!2", - ] - - expected = [ - [1, 2, 1, 2], - [1, 1, 2, 2], - [1, None, 1, None], - [1, 1, None, None], - ] - - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern) - self.assertEqual(expected[i], result) - - def test_negation(self): - """ - Test the ! and !! 
operators for expanding lists - """ - - parser = PARSER - patterns = [ - "-1", - "-22.231", - ] - - expected = [ - [-1], - [-22.231], - ] - - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern) - self.assertEqual(expected[i], result) - - def test_ramps(self): - """ - Test ramps with and without step - """ - parser = PARSER - patterns = ["[1:5]", "[0:1,.3]", "[10:8,.5]", "0, [1:3], 4, 5"] - expected = [ - [1, 2, 3, 4, 5], - [0, 0.3, 0.6, 0.9], - [10, 9.5, 9, 8.5, 8], - [0, 1, 2, 3, 4, 5], - ] - for i, pattern in enumerate(patterns): - with self.subTest(i=i, pattern=pattern): - result = parser.parse(pattern) - self.assertEqual(len(result), len(expected[i])) - for x, y in zip(result, expected[i]): - self.assertAlmostEqual(x, y) - - -if __name__ == "__main__": - unittest.main(verbosity=2) diff --git a/tests/test_patterns.py b/tests/test_patterns.py new file mode 100644 index 00000000..1a4f449a --- /dev/null +++ b/tests/test_patterns.py @@ -0,0 +1,137 @@ +import random + +import pytest +import pytest_asyncio + +from sardine import FishBowl +from sardine.sequences import ListParser + + +# NOTE: only put new parsers here if they support sardine's patterning syntax +@pytest_asyncio.fixture(scope="module", params=[ListParser]) +def fish_bowl(request: pytest.FixtureRequest): + return FishBowl(parser=request.param()) + + +@pytest.mark.parametrize( + "pattern,expected", + [ + (".", [None]), + (".!4", [None] * 4), + ], +) +def test_silence_op(fish_bowl: FishBowl, pattern: str, expected: list): + assert fish_bowl.parser.parse(pattern) == expected + + +@pytest.mark.parametrize( + "pattern,expected", + [ + ("1|2|3|4", [[1], [2], [3], [4]]), + ("[1,2,3,4]|[., .]", [[1, 2, 3, 4], [None, None]]), + ("baba|dada", [["baba"], ["dada"]]), + ("(baba:2)|(dada:4)", [["baba:2"], ["dada:4"]]), + ], +) +def test_choice_op(fish_bowl: FishBowl, pattern: str, expected: list): + assert fish_bowl.parser.parse(pattern) in expected + + 
# --- tests/test_patterns.py (continued): numeric, arithmetic, note,
# range, expansion, negation and ramp pattern tests ---


@pytest.mark.parametrize(
    "pattern,expected",
    [
        (".5", [0.5]),
        ("0.5", [0.5]),
        ("1, 2, 3", [1, 2, 3]),
        ("1+1, 2*3, 4-1, 5/2", [2, 6, 3, 2.5]),
    ],
)
def test_number_pattern(fish_bowl: FishBowl, pattern: str, expected: list):
    """Numbers and simple arithmetic expressions evaluate inside patterns."""
    result = fish_bowl.parser.parse(pattern)
    assert result == pytest.approx(expected)


@pytest.mark.parametrize(
    "pattern,expected",
    [
        ("[1,2,3]+1, [1,2,3]*2", [2, 3, 4, 2, 4, 6]),
        ("[1,2,3]/2, [1,2,3]//2", [0.5, 1.0, 1.5, 0, 1, 1]),
        ("[2,3,4]-2, [2,3,4]%2", [0, 1, 2, 0, 1, 0]),
        ("[1,2,3,4]+[1,2,3,4]", [2, 4, 6, 8]),
        ("[1,2,3,4]*[1,2,3,4]", [1, 4, 9, 16]),
        ("[1,2,3,4]/[1,2,3,4]", [1.0, 1.0, 1.0, 1.0]),
        ("[1,2,3,4]/[2,3,4,5]", [0.5, 2 / 3, 0.75, 0.8]),
        ("[2,4,6,8]%[12,8]", [2, 4, 6, 0]),
    ],
)
def test_list_arithmetic(fish_bowl: FishBowl, pattern: str, expected: list):
    """Arithmetic between a list and a scalar, or between two lists."""
    result = fish_bowl.parser.parse(pattern)
    assert result == pytest.approx(expected)


@pytest.mark.parametrize(
    "pattern,expected",
    [
        ("C,D,E,F,G,A,B", [60, 62, 64, 65, 67, 69, 71]),
        ("Do,Re,Mi,Fa,Sol,La,Si", [60, 62, 64, 65, 67, 69, 71]),
        ("Do,Ré,Mi,Fa,Sol,La,Si", [60, 62, 64, 65, 67, 69, 71]),
        ("C0,C1,C2,C3,C4,C5,C6,C7,C8,C9", [12, 24, 36, 48, 60, 72, 84, 96, 108, 120]),
        ("C, C#, Cb", [60, 61, 59]),
        ("C, Eb, G", [60, 63, 67]),
        ("C, C., C.., C...", [60, 48, 36, 24]),
        ("C, C', C'', C'''", [60, 72, 84, 96]),
        ("C@maj7, C@min7", [60, 64, 67, 71, 60, 63, 67, 70]),
    ],
)
def test_note_compositions(fish_bowl: FishBowl, pattern: str, expected: list):
    """Note names (English and solfège), accidentals, octaves and chords."""
    result = fish_bowl.parser.parse(pattern)
    assert result == expected


@pytest.mark.parametrize(
    "pattern,expected_range",
    [
        ("0~1", range(0, 2)),
        ("0~10", range(0, 11)),
        ("100~200", range(100, 201)),
    ],
)
def test_integer_ranges(fish_bowl: FishBowl, pattern: str, expected_range: range):
    """The ~ operator yields a random integer within the inclusive range."""
    value = fish_bowl.parser.parse(pattern)[0]
    assert value in expected_range


@pytest.mark.parametrize(
    "pattern,expected",
    [
        ("[1,2]!2", [1, 2, 1, 2]),
        ("[1,2]!!2", [1, 1, 2, 2]),
        ("[1,.]!2", [1, None, 1, None]),
        ("[1,.]!!2", [1, 1, None, None]),
    ],
)
def test_list_expansion(fish_bowl: FishBowl, pattern: str, expected: list):
    """! repeats the whole list, !! repeats each element in place."""
    result = fish_bowl.parser.parse(pattern)
    assert result == expected


@pytest.mark.parametrize(
    "pattern,expected",
    [
        ("-1", [-1]),
        ("-22.231", [-22.231]),
    ],
)
def test_negation(fish_bowl: FishBowl, pattern: str, expected: list):
    """A leading minus sign negates a numeric literal."""
    result = fish_bowl.parser.parse(pattern)
    assert result == expected


@pytest.mark.parametrize(
    "pattern,expected",
    [
        ("[1:5]", [1, 2, 3, 4, 5]),
        ("[0:1,.3]", [0, 0.3, 0.6, 0.9]),
        ("[10:8,.5]", [10, 9.5, 9, 8.5, 8]),
        ("0, [1:3], 4, 5", [0, 1, 2, 3, 4, 5]),
    ],
)
def test_ramps(fish_bowl: FishBowl, pattern: str, expected: list):
    """[a:b] ramps, ascending or descending, with an optional step size."""
    result = fish_bowl.parser.parse(pattern)
    assert result == pytest.approx(expected)