diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..329b40dc --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +/debian export-ignore +/.git export-ignore +.gitignore export-ignore +.gitattributes export-ignore +.travis.yml export-ignore diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..ca33b927 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,57 @@ +name: CI tests + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + lint: + runs-on: ubuntu-20.04 + steps: + - name: Repository checkout + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.6" + + - name: Install dependencies + run: | + pip install --upgrade pip tox + + - name: Lint + run: | + tox -e lint + + test: + runs-on: ubuntu-20.04 + strategy: + matrix: + python-version: + - "3.6" + - "3.7" + - "3.8" + - "3.9" + - "3.10" + steps: + - name: Repository checkout + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + pip install --upgrade pip tox codecov + + - name: Test + run: | + tox -e py3 + codecov diff --git a/.github/workflows/cla-check.yml b/.github/workflows/cla-check.yml new file mode 100644 index 00000000..6b767cce --- /dev/null +++ b/.github/workflows/cla-check.yml @@ -0,0 +1,10 @@ +name: cla-check + +on: [pull_request] + +jobs: + cla-check: + runs-on: ubuntu-latest + steps: + - name: Check if CLA signed + uses: canonical/has-signed-canonical-cla@v2 diff --git a/.github/workflows/stale-cron.yaml b/.github/workflows/stale-cron.yaml new file mode 100644 index 00000000..f5579966 --- /dev/null +++ b/.github/workflows/stale-cron.yaml @@ -0,0 +1,9 @@ +name: Close inactive issues +on: + schedule: + - cron: "0 0 * * *" + +jobs: + close-issues: + uses: 
canonical/maas-github-workflows/.github/workflows/stale-cron.yaml@v0 + secrets: inherit diff --git a/.gitignore b/.gitignore index baa9aeb4..3451c7f4 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,7 @@ bin/ include/ local/ share/ +*.snap # PyInstaller # Usually these files are written by a python script from a template @@ -63,3 +64,38 @@ target/ # mkdocs site/ + +# Packaging +.pc/ + +# Editors +.vscode/ + +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/ + + +# CMake +cmake-build-*/ + + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 94b7aa83..00000000 --- a/.travis.yml +++ /dev/null @@ -1,16 +0,0 @@ -language: python -python: - - "3.5" - -install: - - pip install codecov tox - -script: - - tox -e py35,lint - -after_success: - - codecov --env TRAVIS_PYTHON_VERSION - -branches: - only: - - master diff --git a/Makefile b/Makefile index 6f5a2c5b..774315ab 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,10 @@ -PYTHON := python3.5 +python := python3 +snapcraft := SNAPCRAFT_BUILD_INFO=1 /snap/bin/snapcraft + +# --- + +install-dependencies: + if [ -x /usr/bin/snap ]; then sudo snap install --classic snapcraft; fi # --- @@ -8,8 +14,9 @@ develop: bin/python setup.py dist: bin/python setup.py README bin/python setup.py sdist bdist_wheel -upload: bin/python setup.py README - bin/python setup.py sdist bdist_wheel upload +upload: bin/python bin/twine setup.py README + bin/python setup.py sdist bdist_wheel + bin/twine 
upload dist/* test: bin/tox @bin/tox @@ -17,6 +24,9 @@ test: bin/tox integrate: bin/tox @bin/tox -e integrate +format: bin/tox + @bin/tox -e format,imports + lint: bin/tox @bin/tox -e lint,imports @@ -32,6 +42,14 @@ clean: # --- +snap-clean: + $(snapcraft) clean + +snap: + $(snapcraft) + +# --- + README: README.md pandoc --from markdown --to rst --output $@ $^ @@ -47,11 +65,14 @@ bin/tox: bin/pip bin/pip install --quiet --ignore-installed tox bin/python bin/pip: - virtualenv --python=$(PYTHON) --quiet $(CURDIR) + virtualenv --python=$(python) --quiet $(CURDIR) bin/mkdocs: bin/pip bin/pip install --quiet --ignore-installed "mkdocs >= 0.14.0" +bin/twine: bin/pip + bin/pip install --quiet --ignore-installed twine + # --- api-json-raw := $(wildcard maas/client/bones/testing/*.raw.json) @@ -67,4 +88,4 @@ pretty: $(api-json) # --- -.PHONY: develop dist docs docs-to-github test integrate lint clean pretty +.PHONY: install-dependencies develop dist docs docs-to-github test integrate lint clean pretty snap snap-clean diff --git a/README b/README index 7fe8f894..3b292a58 100644 --- a/README +++ b/README @@ -1,66 +1,55 @@ -python-libmaas -============== +# python-libmaas -Python client API library made especially for -`MAAS `__. +Python client API library made especially for [MAAS][]. -|Build Status| |codecov.io| +[![CI tests](https://github.com/canonical/python-libmaas/workflows/CI%20tests/badge.svg)](https://github.com/canonical/python-libmaas/actions?query=workflow%3A%22CI+tests%22) +[![codecov.io](https://codecov.io/github/canonical/python-libmaas/coverage.svg?branch=master)](https://codecov.io/github/maas/python-libmaas?branch=master) -Installation ------------- -All the dependencies are declared in ``setup.py`` so this can be -installed with `pip `__. 
Python 3.5 is required. +## Installation -When working from trunk it can be helpful to use ``virtualenv``: +All the dependencies are declared in `setup.py` so this can be installed +with [pip](https://pip.pypa.io/). Python 3.5+ is required. -:: +When working from master it can be helpful to use a virtualenv: - $ virtualenv --python=python3.5 amc && source amc/bin/activate - $ pip install git+https://github.com/maas/python-libmaas.git + $ python3 -m venv ve && source ve/bin/activate + $ pip install git+https://github.com/canonical/python-libmaas.git $ maas --help -Releases are periodically made to `PyPI `__ -but, at least for now, it makes more sense to work directly from trunk. +Releases are periodically made to [PyPI](https://pypi.python.org/) but, +at least for now, it makes more sense to work directly from trunk. -Documentation -------------- -Documentation can be generated with ``make docs`` which publishes into -the ``site`` directory. Recent documentation is also published to the -`MAAS Client Library & CLI -documentation `__ site. +## Documentation -Development ------------ +Documentation can be generated with `make docs` which publishes into the +`site` directory. Recent documentation is also published to the +[MAAS Client Library & CLI documentation][docs] site. 
-It's pretty easy to start hacking on *python-libmaas*: -:: +## Development + +It's pretty easy to start hacking on _python-libmaas_: $ git clone git@github.com:maas/python-libmaas.git $ cd python-libmaas $ make develop $ make test -Installing `IPython `__ is generally a good idea -too: - -:: +Installing [IPython][] is generally a good idea too: $ bin/pip install -UI IPython -Pull requests are welcome but authors need to sign the `Canonical -contributor license -agreement `__ before those -PRs can be merged. +Pull requests are welcome but authors need to sign the [Canonical +contributor license agreement][CCLA] before those PRs can be merged. + -History & licence ------------------ +## History & licence -In short: `AGPLv3 `__. +In short: [AGPLv3][]. -*python-libmaas* was begun by a core MAAS developer, Gavin Panella, on +_python-libmaas_ was begun by a core MAAS developer, Gavin Panella, on his own time, but is now maintained by the core MAAS team at Canonical. It is licensed under the GNU Affero GPLv3, the same as MAAS itself. @@ -68,7 +57,11 @@ Some of the code in here has come from MAAS, upon which Canonical Ltd has the copyright. Gavin Panella licenses his parts under the AGPLv3, and MAAS is also under the AGPLv3, so everything should be good. -.. |Build Status| image:: https://travis-ci.org/maas/python-libmaas.svg?branch=master - :target: https://travis-ci.org/maas/python-libmaas -.. 
|codecov.io| image:: https://codecov.io/github/maas/python-libmaas/coverage.svg?branch=master - :target: https://codecov.io/github/maas/python-libmaas?branch=master + +[MAAS]: https://maas.io/ +[docs]: http://maas.github.io/python-libmaas/ + +[CCLA]: https://www.ubuntu.com/legal/contributors +[AGPLv3]: https://www.gnu.org/licenses/agpl-3.0.html + +[IPython]: https://ipython.org/ diff --git a/README.md b/README.md deleted file mode 100644 index 9f9f08b3..00000000 --- a/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# python-libmaas - -Python client API library made especially for [MAAS][]. - -[![Build Status](https://travis-ci.org/maas/python-libmaas.svg?branch=master)](https://travis-ci.org/maas/python-libmaas) -[![codecov.io](https://codecov.io/github/maas/python-libmaas/coverage.svg?branch=master)](https://codecov.io/github/maas/python-libmaas?branch=master) - - -## Installation - -All the dependencies are declared in `setup.py` so this can be installed -with [pip](https://pip.pypa.io/). Python 3.5 is required. 
- -When working from trunk it can be helpful to use `virtualenv`: - - $ virtualenv --python=python3.5 amc && source amc/bin/activate - $ pip install git+https://github.com/maas/python-libmaas.git - $ maas --help - -Releases are periodically made to [PyPI](https://pypi.python.org/) but, -at least for now, it makes more sense to work directly from trunk. - - -## Documentation - -Documentation can be generated with `make docs` which publishes into the -`site` directory. Recent documentation is also published to the -[MAAS Client Library & CLI documentation][docs] site. - - -## Development - -It's pretty easy to start hacking on _python-libmaas_: - - $ git clone git@github.com:maas/python-libmaas.git - $ cd python-libmaas - $ make develop - $ make test - -Installing [IPython][] is generally a good idea too: - - $ bin/pip install -UI IPython - -Pull requests are welcome but authors need to sign the [Canonical -contributor license agreement][CCLA] before those PRs can be merged. - - -## History & licence - -In short: [AGPLv3][]. - -_python-libmaas_ was begun by a core MAAS developer, Gavin Panella, on -his own time, but is now maintained by the core MAAS team at Canonical. -It is licensed under the GNU Affero GPLv3, the same as MAAS itself. - -Some of the code in here has come from MAAS, upon which Canonical Ltd -has the copyright. Gavin Panella licenses his parts under the AGPLv3, -and MAAS is also under the AGPLv3, so everything should be good. 
- - -[MAAS]: https://maas.io/ -[docs]: http://maas.github.io/python-libmaas/ - -[CCLA]: https://www.ubuntu.com/legal/contributors -[AGPLv3]: https://www.gnu.org/licenses/agpl-3.0.html - -[IPython]: https://ipython.org/ diff --git a/README.md b/README.md new file mode 120000 index 00000000..100b9382 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +README \ No newline at end of file diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 00000000..98e04531 --- /dev/null +++ b/debian/changelog @@ -0,0 +1,14 @@ +python-libmaas (0.6.0-0ubuntu1) bionic; urgency=medium + + * New upstream release + * d/p/{00-disable_bson_install_requires,01-fix_setup_py_lists}.patch: Drop. + merged upstream. + * debian/watch: Cleanup unneeded comments. 
+ + -- Andres Rodriguez Tue, 06 Feb 2018 21:15:00 -0500 + +python-libmaas (0.5.0-0ubuntu1) bionic; urgency=medium + + * Initial release (LP: #1747328) + + -- Andres Rodriguez Sun, 04 Feb 2018 20:45:42 -0500 diff --git a/debian/compat b/debian/compat new file mode 100644 index 00000000..f599e28b --- /dev/null +++ b/debian/compat @@ -0,0 +1 @@ +10 diff --git a/debian/control b/debian/control new file mode 100644 index 00000000..40fe1c62 --- /dev/null +++ b/debian/control @@ -0,0 +1,26 @@ +Source: python-libmaas +Section: python +Priority: optional +Maintainer: Andres Rodriguez +Build-Depends: debhelper (>= 10), dh-python, python3-all, python3-setuptools +Standards-Version: 4.1.3 +Homepage: https://github.com/canonical/python-libmaas +X-Python3-Version: >= 3.2 + +Package: python3-libmaas +Architecture: all +Depends: python3-aiohttp, + python3-argcomplete, + python3-bson, + python3-colorclass, + python3-oauthlib, + python3-tz, + python3-yaml, + python3-terminaltables, + ${python3:Depends}, + ${misc:Depends} +Description: MAAS asyncio client library (Python 3) + The MAAS Python Client library provides an asyncio based library + to interact with MAAS. + . + This package installs the library for Python 3. diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 00000000..8af8f6a0 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,26 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: python-libmaas +Source: https://github.com/canonical/python-libmaas + +Files: * +Copyright: 2017-2018 Canonical Ltd. +License: AGPL-3.0+ + +Files: debian/* +Copyright: 2018 Andres Rodriguez + 2018 Canonical Ltd. 
+License: AGPL-3.0+ + +License: AGPL-3.0+ + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as + published by the Free Software Foundation, either version 3 of the + License, or (at your option) any later version. + . + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + . + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . diff --git a/debian/files b/debian/files new file mode 100644 index 00000000..1ee54ad2 --- /dev/null +++ b/debian/files @@ -0,0 +1 @@ +python-libmaas_0.6.0-0ubuntu1_source.buildinfo python optional diff --git a/debian/patches/series b/debian/patches/series new file mode 100644 index 00000000..e69de29b diff --git a/debian/rules b/debian/rules new file mode 100755 index 00000000..8cddcb21 --- /dev/null +++ b/debian/rules @@ -0,0 +1,24 @@ +#!/usr/bin/make -f +# See debhelper(7) (uncomment to enable) +# output every command that modifies files on the build system. +#export DH_VERBOSE = 1 + +export PYBUILD_NAME=python-libmaas + +%: + dh $@ --with python3 --buildsystem=pybuild + + +override_dh_auto_install: + dh_auto_install + + # Remove binary that's created by the setup.py + rm -rf $(CURDIR)/debian/python3-libmaas/usr/bin/maas + +override_dh_auto_build: + # Do nothing. We have nothing to build, hence disabling + # the build process. + +override_dh_auto_test: + # Do nothing. Tests require running daemons, hence + # disable them during packaging building. 
diff --git a/debian/source/format b/debian/source/format new file mode 100644 index 00000000..163aaf8d --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/debian/source/options b/debian/source/options new file mode 100644 index 00000000..cb61fa52 --- /dev/null +++ b/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore = "^[^/]*[.]egg-info/" diff --git a/debian/watch b/debian/watch new file mode 100644 index 00000000..929cbb7a --- /dev/null +++ b/debian/watch @@ -0,0 +1,10 @@ +# Compulsory line, this is a version 4 file +version=4 + +# PGP signature mangle, so foo.tar.gz has foo.tar.gz.sig +#opts="pgpsigurlmangle=s%$%.sig%" + +# GitHub hosted projects +opts="filenamemangle=s%(?:.*?)?v?(\d[\d.]*)\.tar\.gz%python-libmaas-$1.tar.gz%" \ + https://github.com/canonical/python-libmaas/tags \ + (?:.*?/)?v?(\d[\d.]*)\.tar\.gz debian uupdate diff --git a/doc.yaml b/doc.yaml index d717a6b1..d39de881 100644 --- a/doc.yaml +++ b/doc.yaml @@ -3,7 +3,7 @@ markdown_extensions: - codehilite - sane_lists - smarty -repo_url: https://github.com/maas/python-libmaas +repo_url: https://github.com/canonical/python-libmaas site_name: MAAS Client Library & CLI strict: true theme: readthedocs @@ -17,14 +17,6 @@ pages: - Interfaces: client/interfaces.md - Events: client/events.md - Others: client/other.md - - Internals: - - Bones: bones/index.md - - Viscera: - - Introduction: viscera/index.md - - Getting started: viscera/getting-started.md - - Nodes: viscera/nodes.md - - Events: viscera/events.md - - Others: viscera/other.md - Development: - Release checklist: development/releasing.md - Adding an object: development/adding-an-object.md diff --git a/doc/bones/index.md b/doc/bones/index.md deleted file mode 100644 index 752c3c03..00000000 --- a/doc/bones/index.md +++ /dev/null @@ -1,50 +0,0 @@ -

bones — Low-level Python client API

- -You may prefer the [higher-level API _viscera_](../viscera/index.md), -but maybe you need to do something that you can't do in _viscera_ yet -(please file a bug!), or you're developing _viscera_ itself (which uses -_bones_ behind the scenes). - - -## Some example code - -```python -#!/usr/bin/env python3.5 - -from http import HTTPStatus -from pprint import pprint -from maas.client import bones - -# Replace "a:b:c" with an API key obtained from the UI at -# http://$host:$port/MAAS/account/prefs/. -profile, session = bones.SessionAPI.connect( - "http://localhost:5240/MAAS/", apikey="a:b:c") - -# Create a tag if it doesn't exist. -tag_name = "gryphon" -tag_comment = "Gryphon's Stuff" -try: - tag = session.Tag.read(name=tag_name) -except bones.CallError as error: - if error.status == HTTPStatus.NOT_FOUND: - tag = session.Tags.new( - name=tag_name, comment=tag_comment) - else: - raise - -# List all the tags. -print(">>> Tags.list()") -pprint(session.Tags.list()) - -# Get the system IDs for all nodes. -print(">>> Nodes.list()") -all_nodes_system_ids = [ - node["system_id"] for node in session.Nodes.list() -] -pprint(all_nodes_system_ids) - -# Associate the tag with all nodes. -print(">>> Tag.update_nodes()") -pprint(session.Tag.update_nodes( - name=tag["name"], add=all_nodes_system_ids)) -``` diff --git a/doc/client/events.md b/doc/client/events.md index 70b02f65..f8d2e658 100644 --- a/doc/client/events.md +++ b/doc/client/events.md @@ -18,6 +18,7 @@ This accepts a plethora of optional arguments to narrow down the results: >>> events = client.events.query(agent_name=…) >>> events = client.events.query(level=…) >>> events = client.events.query(after=…, limit=…) +>>> events = client.events.query(owner=…) ``` These arguments can be combined to narrow the results even further. 
diff --git a/doc/client/index.md b/doc/client/index.md index edf64f6d..d6f6e9ad 100644 --- a/doc/client/index.md +++ b/doc/client/index.md @@ -9,9 +9,9 @@ Web API. ## An example ```python -#!/usr/bin/env python3.5 +#!/usr/bin/env python3.6 -from maas.client import connect +import maas.client # Replace … with an API key previously obtained by hand from # http://$host:$port/MAAS/account/prefs/. @@ -73,7 +73,7 @@ heavily in _python-libmaas_ — along with the ``Asynchronous`` metaclass — to create the automatic blocking/not-blocking behaviour. ```python -#!/usr/bin/env python3.5 +#!/usr/bin/env python3.6 from maas.client import login from maas.client.utils.async import asynchronous diff --git a/doc/client/nodes.md b/doc/client/nodes.md index 186d3660..2a3f308f 100644 --- a/doc/client/nodes.md +++ b/doc/client/nodes.md @@ -94,6 +94,19 @@ Delete a machine is simple as calling delete on the machine object. >>> machine.delete() ``` +## Assigning tags + +Assigning tags to a machine is as simple as calling `add` or `remove` on +`tags` attribute. + +```pycon +>>> new_tag = client.tags.create('new') +>>> machine.tags.add(new_tag) +>>> machine.tags +]> +>>> machine.tags.remove(new_tag) +``` + ## Commissioning and testing Easily commission a machine and wait until it successfully completes. By @@ -109,7 +122,7 @@ A more advanced asyncio based script that runs commissioning with extra scripts and waits until all machines have successfully commissioned. ```python -#!/usr/bin/env python3.5 +#!/usr/bin/env python3 import asyncio diff --git a/doc/index.md b/doc/index.md index e20d4b8e..5813d83e 100644 --- a/doc/index.md +++ b/doc/index.md @@ -27,7 +27,7 @@ until we release a beta all APIs could change. 
Either work from a branch: ```console -$ git clone https://github.com/maas/python-libmaas.git +$ git clone https://github.com/canonical/python-libmaas.git $ cd python-libmaas $ make ``` @@ -36,14 +36,14 @@ Or install with [pip](https://pip.pypa.io/) into a [virtualenv](https://virtualenv.readthedocs.org/): ```console -$ virtualenv --python=python3.5 amc && source amc/bin/activate -$ pip install git+https://github.com/maas/python-libmaas.git +$ virtualenv --python=python3 amc && source amc/bin/activate +$ pip install git+https://github.com/canonical/python-libmaas.git ``` Or install from [PyPI](https://pypi.python.org/): ```console -$ virtualenv --python=python3.5 amc && source amc/bin/activate +$ virtualenv --python=python3 amc && source amc/bin/activate $ pip install python-libmaas ``` @@ -228,23 +228,6 @@ Pull requests are welcome but authors need to sign the [Canonical contributor license agreement][CCLA] before those PRs can be merged. -### _bones_ & _viscera_ - -Digging around in the code and when using the primary client API, you -may find references to _bones_ and _viscera_. These libraries form the -base for the client API: - -* [_bones_](bones/index.md) is a lower-level library that closely - mirrors MAAS's Web API. Every MAAS server publishes a description of - its Web API and _bones_ generates a convenient mechanism to interact - with it. - -* [_viscera_](viscera/index.md) is a higher-level library which makes - heavy use of _bones_. 
MAAS's Web API is sometimes unfriendly or - inconsistent, but _viscera_ presents a hand-crafted API that has been - designed for developers rather than machines. - - [asyncio]: https://docs.python.org/3/library/asyncio.html [CCLA]: https://www.ubuntu.com/legal/contributors diff --git a/doc/viscera/events.md b/doc/viscera/events.md deleted file mode 100644 index 5a087621..00000000 --- a/doc/viscera/events.md +++ /dev/null @@ -1,33 +0,0 @@ -

Events

- -Events are similar to other _viscera_ objects... but a little different -too. The only way to get events is by the ``query`` method: - -```pycon ->>> events = origin.Events.query() -``` - -This accepts a plethora of optional arguments to narrow down the results: - -```pycon ->>> events = origin.Events.query(hostnames={"foo", "bar"}) ->>> events = origin.Events.query(domains={"example.com", "maas.io"}) ->>> events = origin.Events.query(zones=["red", "blue"]) ->>> events = origin.Events.query(macs=("12:34:56:78:90:ab", )) ->>> events = origin.Events.query(system_ids=…) ->>> events = origin.Events.query(agent_name=…) ->>> events = origin.Events.query(level=…) ->>> events = origin.Events.query(after=…, limit=…) -``` - -These arguments can be combined to narrow the results even further. - -The ``level`` argument is a little special. It's a choice from a -predefined set. For convenience, those choices are defined in the -``Level`` enum: - -```pycon ->>> events = origin.Events.query(level=origin.Events.Level.ERROR) -``` - -but you can also pass in the string "ERROR" or the number 40. diff --git a/doc/viscera/getting-started.md b/doc/viscera/getting-started.md deleted file mode 100644 index 0ecd4832..00000000 --- a/doc/viscera/getting-started.md +++ /dev/null @@ -1,79 +0,0 @@ -

Getting started with viscera

- - -## Logging-in - -Log-in using the command-line tool and start an interactive Python -shell: - -```console -$ maas profiles login foo http://example.com:5240/MAAS/ admin -Password: … -$ maas shell -``` - -This will provide you with a pre-prepared `origin` object that points to -`foo` from above. This is the root object of the API. - -If you have MAAS 2.2 or later you can also log-in programmatically: - -```pycon ->>> profile, origin = Origin.login( -... "http://example.com:5240/MAAS/", username="admin", -... password="…") -``` - -The `profile` has not been saved, but it's easy to do so: - -```pycon ->>> profile = profile.replace(name="foo") ->>> with ProfileStore.open() as store: -... store.save(profile) -... store.default = profile -``` - -This does the same as the `maas profiles login` command. - -But there's no need! There's a command built in to do it for you: - -```console -$ bin/maas shell -Welcome to the MAAS shell. - -Predefined variables: - - origin: A `viscera` origin, configured for foo. - session: A `bones` session, configured for foo. - ->>> -``` - - -## Logging-out - -Log-out using the command-line tool: - -```console -$ bin/maas profiles remove foo -``` - -or, programmatically: - -```pycon ->>> with ProfileStore.open() as store: -... store.delete("foo") -``` - - -## `dir()`, `help()`, and tab-completion - -The _viscera_ API has been designed to be very discoverable using -tab-completion, `dir()`, `help()`, and so on. Start with that: - -```pycon ->>> origin. -… -``` - -This works best when you've got [IPython](https://ipython.org/) -installed. diff --git a/doc/viscera/index.md b/doc/viscera/index.md deleted file mode 100644 index 4e52bf32..00000000 --- a/doc/viscera/index.md +++ /dev/null @@ -1,23 +0,0 @@ -

viscera — High-level Python client API

- - -## Some example code - -```python -#!/usr/bin/env python3.5 - -from pprint import pprint -from maas.client import viscera - -profile, origin = viscera.Origin.login( - "http://localhost:5240/MAAS/", username="alice", - password="wonderland") - -# List all the tags. -print(">>> origin.Tags.read()") -pprint(origin.Tags.read()) - -# List all machines. -print(">>> origin.Machines.read()") -pprint(origin.Machines.read()) -``` diff --git a/doc/viscera/nodes.md b/doc/viscera/nodes.md deleted file mode 100644 index eed315b8..00000000 --- a/doc/viscera/nodes.md +++ /dev/null @@ -1,86 +0,0 @@ -

Machines, devices, racks, and regions

- -Given an ``Origin`` instance bound to your MAAS server, you can -interrogate your nodes with: - -```python -origin.Machines.read() - # returns an origin.Machines instance, a - # sequence of origin.Machine instances. - -origin.Devices.read() - # returns an origin.Devices instance, a - # sequence of origin.Device instances. - -origin.RackControllers.read() - # returns an origin.RackControllers instance, a - # sequence of origin.RackController instances. - -origin.RegionControllers.read() - # returns an origin.RegionControllers instance, a - # sequence of origin.RegionController instances. -``` - - -## An example - -```pycon ->>> for machine in origin.Machines.read(): -... print(repr(node)) - -``` - -Individual nodes can be read from the Web API. - -```pycon ->>> machine = origin.Machine.read(system_id="pncys4") ->>> machine - -``` - -Machines — and devices, racks, and regions — have many useful -attributes: - -```pycon ->>> machine.architecture -'amd64/generic' ->>> machine.cpus -4 ->>> for interface in machine.interfaces: -... print(repr(Interface)) - - -``` - -Don't forget to try using tab-completion — the objects have been -designed to be particularly friendly for interactive use — or -``dir(machine)`` to find out what other fields and methods are -available. - -__TODO__: Updating nodes. - - -## Allocating and deploying - -```pycon ->>> help(origin.Machines.allocate) -Help on method allocate in module maas.client.viscera.machines: - -allocate( - *, hostname:str=None, architecture:str=None, cpus:int=None, - memory:float=None, tags:typing.Sequence=None) - method of maas.client.viscera.machines.MachinesType instance - :param hostname: The hostname to match. - :param architecture: The architecture to match, e.g. "amd64". - :param cpus: The minimum number of CPUs to match. - :param memory: The minimum amount of RAM to match. - :param tags: The tags to match, as a sequence. 
Each tag may be - prefixed with a hyphen to denote that the given tag should NOT be - associated with a matched machine. ->>> machine = origin.Machines.allocate(tags=("foo", "-bar")) ->>> print(machine.status_name) -Acquired ->>> machine.deploy() ->>> print(machine.status_name) -Deploying -``` diff --git a/doc/viscera/other.md b/doc/viscera/other.md deleted file mode 100644 index 3c5407bb..00000000 --- a/doc/viscera/other.md +++ /dev/null @@ -1,39 +0,0 @@ -

Other objects

- -There are several other object types available via _viscera_. Use -``dir()`` and tab-completion to dig around interactively, or read the -code; we've tried to keep it readable. - - -## Files, users, tags - -Similarly to nodes, these sets of objects can be fetched: - -```pycon ->>> tags = origin.Tags.read() ->>> files = origin.Files.read() ->>> users = origin.Users.read() -``` - -When reading from collections, as above, the returned object is -list-like: - -```pycon ->>> len(tags) -5 ->>> tags[3] - ->>> tags[3] in tags -True ->>> not_foo = [tag for tag in tags if tag.name != 'foo'] ->>> len(not_foo) -4 -``` - -However, it's read-only: - -```pycon ->>> tags[0] = "bob" -… -TypeError: 'Tags' object does not support item assignment -``` diff --git a/maas/__init__.py b/maas/__init__.py index de40ea7c..5284146e 100644 --- a/maas/__init__.py +++ b/maas/__init__.py @@ -1 +1 @@ -__import__('pkg_resources').declare_namespace(__name__) +__import__("pkg_resources").declare_namespace(__name__) diff --git a/maas/client/__init__.py b/maas/client/__init__.py index 36603456..874a8230 100644 --- a/maas/client/__init__.py +++ b/maas/client/__init__.py @@ -1,11 +1,8 @@ """Basic entry points.""" -__all__ = [ - "connect", - "login", -] +__all__ = ["connect", "login"] -from .utils.async import asynchronous +from .utils.maas_async import asynchronous @asynchronous @@ -21,8 +18,8 @@ async def connect(url, *, apikey=None, insecure=False): """ from .facade import Client # Lazy. from .viscera import Origin # Lazy. - profile, origin = await Origin.connect( - url, apikey=apikey, insecure=insecure) + + profile, origin = await Origin.connect(url, apikey=apikey, insecure=insecure) return Client(origin) @@ -39,6 +36,8 @@ async def login(url, *, username=None, password=None, insecure=False): """ from .facade import Client # Lazy. from .viscera import Origin # Lazy. 
+ profile, origin = await Origin.login( - url, username=username, password=password, insecure=insecure) + url, username=username, password=password, insecure=insecure + ) return Client(origin) diff --git a/maas/client/bones/__init__.py b/maas/client/bones/__init__.py index 7bda0bb6..7b5b8d31 100644 --- a/maas/client/bones/__init__.py +++ b/maas/client/bones/__init__.py @@ -4,23 +4,21 @@ hence the name "bones". """ -__all__ = [ - "CallError", - "SessionAPI", -] - -from collections import ( - Iterable, - namedtuple, -) +__all__ = ["CallError", "SessionAPI"] + +import typing + +from collections import namedtuple +from collections.abc import Iterable import json +from urllib.parse import urlparse import aiohttp from . import helpers from .. import utils from ..utils import profiles -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous class SessionError(Exception): @@ -32,17 +30,16 @@ class SessionAPI: @classmethod @asynchronous - async def fromURL( - cls, url, *, credentials=None, insecure=False): + async def fromURL(cls, url, *, credentials=None, insecure=False): """Return a `SessionAPI` for a given MAAS instance.""" try: - description = await helpers.fetch_api_description( - url, credentials=credentials, insecure=insecure) + description = await helpers.fetch_api_description(url, insecure=insecure) except helpers.RemoteError as error: # For now just re-raise as SessionError. raise SessionError(str(error)) else: - session = cls(description, credentials) + session = cls(url, description, credentials) + session.scheme = urlparse(url).scheme session.insecure = insecure return session @@ -52,7 +49,10 @@ def fromProfile(cls, profile): :see: `ProfileStore`. 
""" - return cls(profile.description, profile.credentials) + session = cls(profile.url, profile.description, profile.credentials) + session.scheme = urlparse(profile.url).scheme + session.insecure = profile.other.get("insecure", False) + return session @classmethod def fromProfileName(cls, name): @@ -65,8 +65,7 @@ def fromProfileName(cls, name): @classmethod @asynchronous - async def login( - cls, url, *, username=None, password=None, insecure=False): + async def login(cls, url, *, username=None, password=None, insecure=False): """Make a `SessionAPI` by logging-in with a username and password. :return: A tuple of ``profile`` and ``session``, where the former is @@ -74,38 +73,42 @@ async def login( instance made using the profile. """ profile = await helpers.login( - url=url, username=username, password=password, insecure=insecure) - session = cls(profile.description, profile.credentials) + url=url, username=username, password=password, insecure=insecure + ) + session = cls(url, profile.description, profile.credentials) + session.scheme = urlparse(url).scheme session.insecure = insecure return profile, session @classmethod @asynchronous - async def connect( - cls, url, *, apikey=None, insecure=False): + async def connect(cls, url, *, apikey=None, insecure=False): """Make a `SessionAPI` by connecting with an apikey. :return: A tuple of ``profile`` and ``session``, where the former is an unsaved `Profile` instance, and the latter is a `SessionAPI` instance made using the profile. """ - profile = await helpers.connect( - url=url, apikey=apikey, insecure=insecure) - session = cls(profile.description, profile.credentials) + profile = await helpers.connect(url=url, apikey=apikey, insecure=insecure) + session = cls(url, profile.description, profile.credentials) + session.scheme = urlparse(url).scheme session.insecure = insecure return profile, session # Set these on instances. 
+ scheme = "http" insecure = False debug = False - def __init__(self, description, credentials=None): + def __init__(self, url, description, credentials=None): """Construct a `SessionAPI`. + :param url: MAAS URL :param description: The description of the remote API. See `fromURL`. :param credentials: Credentials for the remote system. Optional. """ super(SessionAPI, self).__init__() + self.__url = url self.__description = description self.__credentials = credentials self.__populate() @@ -115,15 +118,15 @@ def __populate(self): if self.__credentials is None: for resource in resources: if resource["anon"] is not None: - handler = HandlerAPI(resource["anon"], resource, self) + handler = HandlerAPI(self.__url, resource["anon"], resource, self) setattr(self, handler.name, handler) else: for resource in resources: if resource["auth"] is not None: - handler = HandlerAPI(resource["auth"], resource, self) + handler = HandlerAPI(self.__url, resource["auth"], resource, self) setattr(self, handler.name, handler) elif resource["anon"] is not None: - handler = HandlerAPI(resource["anon"], resource, self) + handler = HandlerAPI(self.__url, resource["anon"], resource, self) setattr(self, handler.name, handler) @property @@ -153,9 +156,10 @@ class HandlerAPI: operations. """ - def __init__(self, handler, resource, session): + def __init__(self, url, handler, resource, session): """Construct a `HandlerAPI`. + :param url: MAAS URL :param handler: The handler description from the overall API description document. See `SessionAPI`. :param resource: The parent of `handler` in the API description @@ -163,6 +167,7 @@ def __init__(self, handler, resource, session): :param session: The `SessionAPI`. """ super(HandlerAPI, self).__init__() + self.__url = url self.__handler = handler self.__resource = resource self.__session = session @@ -186,7 +191,8 @@ def uri(self): This will typically contain replacement patterns; these are interpolated in `CallAPI`. 
""" - return self.__handler["uri"] + url = urlparse(self.__url) + return f"{url.scheme}://{url.netloc}{self.__handler['path']}" @property def params(self): @@ -209,7 +215,8 @@ def session(self): @property def actions(self): return [ - (name, value) for name, value in vars(self).items() + (name, value) + for name, value in vars(self).items() if not name.startswith("_") and isinstance(value, ActionAPI) ] @@ -296,34 +303,44 @@ async def __call__(self, **data): """ data = dict(data) params = {name: data.pop(name) for name in self.handler.params} - for key, value in data.items(): - if key.startswith('_'): + for key, value in data.copy().items(): + if isinstance(value, typing.Mapping): + del data[key] + for nested_key, nested_value in value.items(): + data[key + "_" + nested_key] = nested_value + for key, value in data.copy().items(): + if key.startswith("_"): data[key[1:]] = data.pop(key) response = await self.bind(**params).call(**data) return response.data def __repr__(self): if self.op is None: - return "" % ( - self.fullname, self.method, self.handler.uri) + return "" % (self.fullname, self.method, self.handler.uri) else: return "" % ( - self.fullname, self.method, self.handler.uri, self.op) + self.fullname, + self.method, + self.handler.uri, + self.op, + ) CallResult = namedtuple("CallResult", ("response", "content", "data")) class CallError(Exception): - def __init__(self, request, response, content, call): desc_for_request = "%(method)s %(uri)s" % request desc_for_response = "HTTP %s %s" % (response.status, response.reason) desc_for_content = content.decode("utf-8", "replace") desc = "%s -> %s (%s)" % ( - desc_for_request, desc_for_response, - desc_for_content if len(desc_for_content) <= 50 else ( - desc_for_content[:49] + "…")) + desc_for_request, + desc_for_response, + desc_for_content + if len(desc_for_content) <= 50 + else (desc_for_content[:49] + "…"), + ) super(CallError, self).__init__(desc) self.request = request self.response = response @@ -336,7 +353,6 
@@ def status(self): class CallAPI: - def __init__(self, params, action): """Create a new `CallAPI`. @@ -356,9 +372,10 @@ def __validate(self): raise TypeError("%s takes no arguments" % self.action.fullname) else: params_expected_desc = ", ".join(sorted(params_expected)) - raise TypeError("%s takes %d arguments: %s" % ( - self.action.fullname, len(params_expected), - params_expected_desc)) + raise TypeError( + "%s takes %d arguments: %s" + % (self.action.fullname, len(params_expected), params_expected_desc) + ) @property def action(self): @@ -371,7 +388,10 @@ def uri(self): # TODO: this is el-cheapo URI Template # support; use uritemplate-py # here? - return self.action.handler.uri.format(**self.__params) + uri = urlparse(self.action.handler.uri) + if uri.scheme != self.action.handler.session.scheme: + uri = uri._replace(scheme=self.action.handler.session.scheme) + return uri.geturl().format(**self.__params) def rebind(self, **params): """Rebind the parameters into the URI. @@ -399,9 +419,10 @@ def prepare(self, data): :param data: Data to pass in the *body* of the request. :type data: dict """ + def expand(data): for name, value in data.items(): - if isinstance(value, Iterable): + if isinstance(value, Iterable) and not isinstance(value, str): for value in value: yield name, value else: @@ -417,7 +438,8 @@ def expand(data): # Bundle things up ready to throw over the wire. uri, body, headers = utils.prepare_payload( - self.action.op, self.action.method, self.uri, data) + self.action.op, self.action.method, self.uri, data + ) # Headers are returned as a list, but they must be a dict for # the signing machinery. 
@@ -442,8 +464,8 @@ async def dispatch(self, uri, body, headers): session = aiohttp.ClientSession(connector=connector) async with session: response = await session.request( - self.action.method, uri, data=body, - headers=_prefer_json(headers)) + self.action.method, uri, data=body, headers=_prefer_json(headers) + ) async with response: # Fetch the raw body content. content = await response.read() @@ -465,14 +487,14 @@ async def dispatch(self, uri, body, headers): # Decode from JSON if that's what it's declared as. if response.content_type is None: data = await response.read() - elif response.content_type.endswith('/json'): + elif response.content_type.endswith("/json"): data = await response.json() else: data = await response.read() if response.content_type is None: data = content - elif response.content_type.endswith('/json'): + elif response.content_type.endswith("/json"): # JSON should always be UTF-8. data = json.loads(content.decode("utf-8")) else: diff --git a/maas/client/bones/helpers.py b/maas/client/bones/helpers.py index 7b940151..a875b995 100644 --- a/maas/client/bones/helpers.py +++ b/maas/client/bones/helpers.py @@ -14,21 +14,19 @@ "UsernameWithoutPassword", ] +import asyncio +from concurrent import futures from getpass import getuser from http import HTTPStatus from socket import gethostname import typing -from urllib.parse import ( - ParseResult, - SplitResult, - urljoin, - urlparse, -) +from urllib.parse import ParseResult, SplitResult, urljoin, urlparse import aiohttp +from macaroonbakery import httpbakery from ..utils import api_url -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous from ..utils.creds import Credentials from ..utils.profiles import Profile @@ -38,22 +36,19 @@ class RemoteError(Exception): async def fetch_api_description( - url: typing.Union[str, ParseResult, SplitResult], - credentials: typing.Optional[Credentials]=None, - insecure: bool=False): + url: typing.Union[str, ParseResult, SplitResult], 
insecure: bool = False +): """Fetch the API description from the remote MAAS instance.""" url_describe = urljoin(_ensure_url_string(url), "describe/") connector = aiohttp.TCPConnector(verify_ssl=(not insecure)) session = aiohttp.ClientSession(connector=connector) async with session, session.get(url_describe) as response: if response.status != HTTPStatus.OK: - raise RemoteError( - "{0} -> {1.status} {1.reason}".format( - url, response)) + raise RemoteError("{0} -> {1.status} {1.reason}".format(url, response)) elif response.content_type != "application/json": raise RemoteError( - "Expected application/json, got: %s" - % response.content_type) + "Expected application/json, got: %s" % response.content_type + ) else: return await response.json() @@ -62,13 +57,10 @@ def _ensure_url_string(url): """Convert `url` to a string URL if it isn't one already.""" if isinstance(url, str): return url - elif isinstance(url, ParseResult): - return url.geturl() - elif isinstance(url, SplitResult): + elif isinstance(url, (ParseResult, SplitResult)): return url.geturl() else: - raise TypeError( - "Could not convert %r to a string URL." % (url,)) + raise TypeError("Could not convert %r to a string URL." % (url,)) def derive_resource_name(name): @@ -108,25 +100,29 @@ async def connect(url, *, apikey=None, insecure=False): if url.username is not None: raise ConnectError( "Cannot provide user-name explicitly in URL (%r) when connecting; " - "use login instead." % url.username) + "use login instead." % url.username + ) if url.password is not None: raise ConnectError( "Cannot provide password explicitly in URL (%r) when connecting; " - "use login instead." % url.username) + "use login instead." % url.username + ) if apikey is None: credentials = None # Anonymous access. else: credentials = Credentials.parse(apikey) - # Circular import. 
- from ..bones.helpers import fetch_api_description - description = await fetch_api_description(url, credentials, insecure) + description = await fetch_api_description(url, insecure) # Return a new (unsaved) profile. return Profile( - name=url.netloc, url=url.geturl(), credentials=credentials, - description=description) + name=url.netloc, + url=url.geturl(), + credentials=credentials, + description=description, + insecure=insecure, + ) class LoginError(Exception): @@ -145,8 +141,12 @@ class LoginNotSupported(LoginError): """Server does not support login-type auth for API clients.""" +class MacaroonLoginNotSupported(LoginError): + """Server does not support macaroon auth for API clients.""" + + @asynchronous -async def login(url, *, username=None, password=None, insecure=False): +async def login(url, *, anonymous=False, username=None, password=None, insecure=False): """Log-in to a remote MAAS instance. Returns a new :class:`Profile` which has NOT been saved. To log-in AND @@ -177,7 +177,8 @@ async def login(url, *, username=None, password=None, insecure=False): else: raise LoginError( "User-name provided explicitly (%r) and in URL (%r); " - "provide only one." % (username, url.username)) + "provide only one." % (username, url.username) + ) if password is None: password = url.password @@ -187,34 +188,69 @@ async def login(url, *, username=None, password=None, insecure=False): else: raise LoginError( "Password provided explicitly (%r) and in URL (%r); " - "provide only one." % (password, url.password)) + "provide only one." % (password, url.password) + ) # Remove user-name and password from the URL. userinfo, _, hostinfo = url.netloc.rpartition("@") url = url._replace(netloc=hostinfo) if username is None: - if password is None or len(password) == 0: - credentials = None # Anonymous. - else: + if password: raise PasswordWithoutUsername( - "Password provided without user-name; specify user-name.") + "Password provided without user-name; specify user-name." 
+ ) + elif anonymous: + credentials = None + else: + credentials = await authenticate_with_macaroon( + url.geturl(), insecure=insecure + ) else: if password is None: raise UsernameWithoutPassword( - "User-name provided without password; specify password.") + "User-name provided without password; specify password." + ) else: credentials = await authenticate( - url.geturl(), username, password, insecure=insecure) + url.geturl(), username, password, insecure=insecure + ) - # Circular import. - from ..bones.helpers import fetch_api_description - description = await fetch_api_description(url, credentials, insecure) + description = await fetch_api_description(url, insecure) + profile_name = username or url.netloc # Return a new (unsaved) profile. return Profile( - name=url.netloc, url=url.geturl(), credentials=credentials, - description=description) + name=profile_name, + url=url.geturl(), + credentials=credentials, + description=description, + insecure=insecure, + ) + + +async def authenticate_with_macaroon(url, insecure=False): + """Login via macaroons and generate and return new API keys.""" + executor = futures.ThreadPoolExecutor(max_workers=1) + + def get_token(): + client = httpbakery.Client() + resp = client.request( + "POST", + "{}/account/?op=create_authorisation_token".format(url), + verify=not insecure, + ) + if resp.status_code == HTTPStatus.UNAUTHORIZED: + # if the auteentication with Candid fails, an exception is raised + # above so we don't get here + raise MacaroonLoginNotSupported("Macaroon authentication not supported") + if resp.status_code != HTTPStatus.OK: + raise LoginError("Login failed: {}".format(resp.text)) + result = resp.json() + return "{consumer_key}:{token_key}:{token_secret}".format(**result) + + loop = asyncio.get_event_loop() + return await loop.run_in_executor(executor, get_token) async def authenticate(url, username, password, *, insecure=False): @@ -234,7 +270,9 @@ def check_response_is_okay(response): if response.status != 
HTTPStatus.OK: raise RemoteError( "{0} -> {1.status} {1.reason}".format( - response.url_obj.human_repr(), response)) + response.url_obj.human_repr(), response + ) + ) connector = aiohttp.TCPConnector(verify_ssl=(not insecure)) session = aiohttp.ClientSession(connector=connector) @@ -247,12 +285,14 @@ def check_response_is_okay(response): if "authenticate-api" not in version_info["capabilities"]: raise LoginNotSupported( "Server does not support automated client log-in. " - "Please obtain an API token via the MAAS UI.") + "Please obtain an API token via the MAAS UI." + ) # POST to the `authenticate` endpoint. data = { - "username": username, "password": password, - "consumer": "%s@%s" % (getuser(), gethostname()) + "username": username, + "password": password, + "consumer": "%s@%s" % (getuser(), gethostname()), } async with session.post(url_authn, data=data) as response: check_response_is_okay(response) diff --git a/maas/client/bones/testing/__init__.py b/maas/client/bones/testing/__init__.py index adefb869..9577e755 100644 --- a/maas/client/bones/testing/__init__.py +++ b/maas/client/bones/testing/__init__.py @@ -1,10 +1,6 @@ """Testing helpers for the Bones API.""" -__all__ = [ - "api_descriptions", - "DescriptionServer", - "list_api_descriptions", -] +__all__ = ["api_descriptions", "DescriptionServer", "list_api_descriptions"] import http import http.server @@ -15,10 +11,7 @@ import threading import fixtures -from pkg_resources import ( - resource_filename, - resource_listdir, -) +from pkg_resources import resource_filename, resource_listdir def list_api_descriptions(): @@ -29,7 +22,7 @@ def list_api_descriptions(): number of the API. 
""" for filename in resource_listdir(__name__, "."): - match = re.match("api(\d)(\d)[.]json", filename) + match = re.match(r"api(\d)(\d)[.]json", filename) if match is not None: version = tuple(map(int, match.groups())) path = resource_filename(__name__, filename) @@ -64,10 +57,7 @@ class DescriptionHandler(http.server.BaseHTTPRequestHandler): @classmethod def make(cls, description=description): - return type( - "DescriptionHandler", (cls, ), - {"description": description}, - ) + return type("DescriptionHandler", (cls,), {"description": description}) def setup(self): super(DescriptionHandler, self).setup() diff --git a/maas/client/bones/testing/desc.py b/maas/client/bones/testing/desc.py index d2cae6b7..46f68a46 100644 --- a/maas/client/bones/testing/desc.py +++ b/maas/client/bones/testing/desc.py @@ -1,9 +1,6 @@ """Abstractions around API description documents.""" -__all__ = [ - "Description", - "Action", -] +__all__ = ["Description", "Action"] from keyword import iskeyword from operator import itemgetter @@ -49,10 +46,7 @@ def __iter__(self): def __repr__(self): title, body = self.doc - return "<%s %r %s>" % ( - self.__class__.__name__, - title.rstrip("."), self.hash, - ) + return "<%s %r %s>" % (self.__class__.__name__, title.rstrip("."), self.hash) class Resources: @@ -109,10 +103,7 @@ def __iter__(self): def __repr__(self): title, body = self["doc"] - return "<%s:%s %r>" % ( - self.__class__.__name__, - self._name, title.rstrip("."), - ) + return "<%s:%s %r>" % (self.__class__.__name__, self._name, title.rstrip(".")) class Action: @@ -178,7 +169,11 @@ def action_name(self): def __repr__(self): title, body = self.doc return "<%s:%s.%s %r %s %s%s>" % ( - self.__class__.__name__, self._resource._name, - self.name, title.rstrip("."), self.method, self.path, - ("" if self.op is None else "?op=" + self.op) + self.__class__.__name__, + self._resource._name, + self.name, + title.rstrip("."), + self.method, + self.path, + ("" if self.op is None else "?op=" + self.op), 
) diff --git a/maas/client/bones/testing/server.py b/maas/client/bones/testing/server.py index 05cea0fc..15648e89 100644 --- a/maas/client/bones/testing/server.py +++ b/maas/client/bones/testing/server.py @@ -1,8 +1,6 @@ """Testing server.""" -__all__ = [ - "ApplicationBuilder", -] +__all__ = ["ApplicationBuilder"] import asyncio from collections import defaultdict @@ -12,10 +10,7 @@ import re from urllib.parse import urlparse -from aiohttp.multipart import ( - CONTENT_DISPOSITION, - parse_content_disposition, -) +from aiohttp.multipart import CONTENT_DISPOSITION, parse_content_disposition import aiohttp.web from multidict import MultiDict @@ -23,13 +18,15 @@ class ApplicationBuilder: - def __init__(self, description): super(ApplicationBuilder, self).__init__() self._description = desc.Description(description) self._application = aiohttp.web.Application() - self._rootpath, self._basepath, self._version = ( - self._discover_version_and_paths()) + ( + self._rootpath, + self._basepath, + self._version, + ) = self._discover_version_and_paths() self._wire_up_description() self._actions = {} self._views = {} @@ -102,8 +99,7 @@ async def version(request): def serve(self): """Return an async context manager to serve the built application.""" - return ApplicationRunner( - self._application, self._basepath) + return ApplicationRunner(self._application, self._basepath) @staticmethod def _wrap_handler(handler): @@ -122,6 +118,7 @@ def _wrap_handler(handler): rendered as JSON. """ + async def wrapper(request): # For convenience, read in all multipart parameters. assert not hasattr(request, "params") @@ -158,8 +155,7 @@ def _discover_version_and_paths(self): base, root, version = match.groups() return root, base, version else: - raise ValueError( - "Could not discover version or paths.") + raise ValueError("Could not discover version or paths.") def _wire_up_description(self): """Arrange for the API description document to be served. 
@@ -170,12 +166,11 @@ def _wire_up_description(self): path = "%s/describe/" % self._basepath def describe(request): - description = self._render_description( - request.url.with_path("")) - description_json = json.dumps( - description, indent=" ", sort_keys=True) + description = self._render_description(request.url.with_path("")) + description_json = json.dumps(description, indent=" ", sort_keys=True) return aiohttp.web.Response( - text=description_json, content_type="application/json") + text=description_json, content_type="application/json" + ) self._application.router.add_get(path, describe) @@ -238,8 +233,8 @@ def _resolve_action(self, action_name): match = re.match(r"^(anon|auth):(\w+[.]\w+)$", action_name) if match is None: raise ValueError( - "Action should be (anon|auth):Resource.action, got: %s" - % (action_name,)) + "Action should be (anon|auth):Resource.action, got: %s" % (action_name,) + ) else: anon_auth, resource_name = match.groups() resources = getattr(self._description, anon_auth) @@ -284,8 +279,7 @@ async def __call__(self, request): handler = self.rest.get(request.method) if handler is None: - raise aiohttp.web.HTTPMethodNotAllowed( - request.method, self.allowed_methods) + raise aiohttp.web.HTTPMethodNotAllowed(request.method, self.allowed_methods) else: return await handler(request) @@ -303,10 +297,12 @@ async def __aenter__(self): self._handler = self._application.make_handler(loop=self._loop) await self._application.startup() self._server = await self._loop.create_server( - self._handler, host="0.0.0.0", port=0) + self._handler, host="0.0.0.0", port=0 + ) return "http://%s:%d/%s/" % ( *self._server.sockets[0].getsockname(), - self._basepath.strip("/")) + self._basepath.strip("/"), + ) async def __aexit__(self, *exc_info): self._server.close() @@ -323,8 +319,7 @@ async def _get_multipart_params(request): """ def get_part_name(part): - _, params = parse_content_disposition( - part.headers.get(CONTENT_DISPOSITION)) + _, params = 
parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) return params.get("name") def get_part_data(part): diff --git a/maas/client/bones/tests/test.py b/maas/client/bones/tests/test.py index 701f8723..1d7fa665 100644 --- a/maas/client/bones/tests/test.py +++ b/maas/client/bones/tests/test.py @@ -2,21 +2,11 @@ import json import random -from unittest.mock import ( - ANY, - Mock, -) -from urllib.parse import ( - parse_qsl, - urlparse, -) +from unittest.mock import ANY, Mock +from urllib.parse import parse_qsl, urlparse from uuid import uuid1 -from testtools.matchers import ( - Equals, - Is, - MatchesStructure, -) +from testtools.matchers import Equals, Is, MatchesStructure from .. import testing from ... import bones @@ -25,39 +15,39 @@ class TestSessionAPI(TestCase): - def test__fromURL_raises_SessionError_when_request_fails(self): fixture = self.useFixture(testing.DescriptionServer(b"bogus")) error = self.assertRaises( - bones.SessionError, bones.SessionAPI.fromURL, - fixture.url + "bogus/") - self.assertEqual( - fixture.url + "bogus/ -> 404 Not Found", - str(error)) + bones.SessionError, bones.SessionAPI.fromURL, fixture.url + "bogus/" + ) + self.assertEqual(fixture.url + "bogus/ -> 404 Not Found", str(error)) def test__fromURL_raises_SessionError_when_content_not_json(self): fixture = self.useFixture(testing.DescriptionServer()) fixture.handler.content_type = "text/json" error = self.assertRaises( - bones.SessionError, bones.SessionAPI.fromURL, fixture.url) - self.assertEqual( - "Expected application/json, got: text/json", - str(error)) + bones.SessionError, bones.SessionAPI.fromURL, fixture.url + ) + self.assertEqual("Expected application/json, got: text/json", str(error)) async def test__fromURL_sets_credentials_on_session(self): fixture = self.useFixture(testing.DescriptionServer()) credentials = make_Credentials() - session = await bones.SessionAPI.fromURL( - fixture.url, credentials=credentials) + session = await 
bones.SessionAPI.fromURL(fixture.url, credentials=credentials) self.assertIs(credentials, session.credentials) async def test__fromURL_sets_insecure_on_session(self): insecure = random.choice((True, False)) fixture = self.useFixture(testing.DescriptionServer()) - session = await bones.SessionAPI.fromURL( - fixture.url, insecure=insecure) + session = await bones.SessionAPI.fromURL(fixture.url, insecure=insecure) self.assertThat(session.insecure, Is(insecure)) + async def test__fromURL_sets_scheme_on_session(self): + insecure = random.choice((True, False)) + fixture = self.useFixture(testing.DescriptionServer()) + session = await bones.SessionAPI.fromURL(fixture.url, insecure=insecure) + self.assertThat(session.scheme, Equals("http")) + class TestSessionAPI_APIVersions(TestCase): """Tests for `SessionAPI` with multiple API versions.""" @@ -71,9 +61,7 @@ async def test__fromURL_downloads_description(self): description = self.path.read_bytes() fixture = self.useFixture(testing.DescriptionServer(description)) session = await bones.SessionAPI.fromURL(fixture.url) - self.assertEqual( - json.loads(description.decode("utf-8")), - session.description) + self.assertEqual(json.loads(description.decode("utf-8")), session.description) class TestActionAPI_APIVersions(TestCase): @@ -84,34 +72,55 @@ class TestActionAPI_APIVersions(TestCase): for name, version, description in testing.api_descriptions ) + url = "http://127.0.0.1:8080/MAAS/api/2.0/" + def test__Version_read(self): - session = bones.SessionAPI(self.description) + session = bones.SessionAPI(self.url, self.description) action = session.Version.read - self.assertThat(action, MatchesStructure.byEquality( - name="read", fullname="Version.read", method="GET", - handler=session.Version, is_restful=True, op=None, - )) + self.assertThat( + action, + MatchesStructure.byEquality( + name="read", + fullname="Version.read", + method="GET", + handler=session.Version, + 
is_restful=True, + op=None, + ), + ) def test__Machines_deployment_status(self): if self.version > (2, 0): self.skipTest("Machines.deployment_status only in <= 2.0") - session = bones.SessionAPI(self.description, ("a", "b", "c")) + session = bones.SessionAPI(self.url, self.description, ("a", "b", "c")) action = session.Machines.deployment_status - self.assertThat(action, MatchesStructure.byEquality( - name="deployment_status", fullname="Machines.deployment_status", - method="GET", handler=session.Machines, is_restful=False, - op="deployment_status", - )) + self.assertThat( + action, + MatchesStructure.byEquality( + name="deployment_status", + fullname="Machines.deployment_status", + method="GET", + handler=session.Machines, + is_restful=False, + op="deployment_status", + ), + ) def test__Machines_power_parameters(self): - session = bones.SessionAPI(self.description, ("a", "b", "c")) + session = bones.SessionAPI(self.url, self.description, ("a", "b", "c")) action = session.Machines.power_parameters - self.assertThat(action, MatchesStructure.byEquality( - name="power_parameters", fullname="Machines.power_parameters", - method="GET", handler=session.Machines, is_restful=False, - op="power_parameters", - )) + self.assertThat( + action, + MatchesStructure.byEquality( + name="power_parameters", + fullname="Machines.power_parameters", + method="GET", + handler=session.Machines, + is_restful=False, + op="power_parameters", + ), + ) class TestCallAPI_APIVersions(TestCase): @@ -122,9 +131,11 @@ class TestCallAPI_APIVersions(TestCase): for name, version, description in testing.api_descriptions ) + url = "http://127.0.0.1:8080/MAAS/api/2.0/" + def test__marshals_lists_into_query_as_repeat_parameters(self): system_ids = list(str(uuid1()) for _ in range(3)) - session = bones.SessionAPI(self.description, ("a", "b", "c")) + session = bones.SessionAPI(self.url, self.description, ("a", "b", "c")) call = 
session.Machines.power_parameters.bind() call.dispatch = Mock() @@ -134,7 +145,7 @@ def test__marshals_lists_into_query_as_repeat_parameters(self): uri, body, headers = call.dispatch.call_args[0] uri = urlparse(uri) self.assertThat(uri.path, Equals("/MAAS/api/2.0/machines/")) - query_expected = [('op', 'power_parameters')] - query_expected.extend(('nodes', system_id) for system_id in system_ids) + query_expected = [("op", "power_parameters")] + query_expected.extend(("nodes", system_id) for system_id in system_ids) query_observed = parse_qsl(uri.query) self.assertThat(query_observed, Equals(query_expected)) diff --git a/maas/client/bones/tests/test_helpers.py b/maas/client/bones/tests/test_helpers.py index 0c78f97d..ca740e1f 100644 --- a/maas/client/bones/tests/test_helpers.py +++ b/maas/client/bones/tests/test_helpers.py @@ -1,34 +1,17 @@ """Tests for `maas.client.bones.helpers`.""" import json -from urllib.parse import ( - urlparse, - urlsplit, -) +from unittest.mock import Mock +from urllib.parse import urlparse, urlsplit import aiohttp.web +from macaroonbakery.httpbakery import Client from testtools import ExpectedException -from testtools.matchers import ( - Equals, - Is, - IsInstance, - MatchesDict, -) - -from .. import ( - helpers, - testing, -) -from ...testing import ( - AsyncCallableMock, - make_name, - make_name_without_spaces, - TestCase, -) -from ...utils import ( - api_url, - profiles, -) +from testtools.matchers import Equals, Is, IsInstance, MatchesDict + +from .. 
import helpers, testing +from ...testing import AsyncCallableMock, make_name, make_name_without_spaces, TestCase +from ...utils import api_url, profiles from ...utils.testing import make_Credentials from ..testing import api_descriptions from ..testing.server import ApplicationBuilder @@ -40,21 +23,21 @@ class TestFetchAPIDescription(TestCase): def test__raises_RemoteError_when_request_fails(self): fixture = self.useFixture(testing.DescriptionServer(b"bogus")) error = self.assertRaises( - helpers.RemoteError, self.loop.run_until_complete, - helpers.fetch_api_description(fixture.url + "bogus/")) - self.assertEqual( - fixture.url + "bogus/ -> 404 Not Found", - str(error)) + helpers.RemoteError, + self.loop.run_until_complete, + helpers.fetch_api_description(fixture.url + "bogus/"), + ) + self.assertEqual(fixture.url + "bogus/ -> 404 Not Found", str(error)) def test__raises_RemoteError_when_content_not_json(self): fixture = self.useFixture(testing.DescriptionServer()) fixture.handler.content_type = "text/json" error = self.assertRaises( - helpers.RemoteError, self.loop.run_until_complete, - helpers.fetch_api_description(fixture.url)) - self.assertEqual( - "Expected application/json, got: text/json", - str(error)) + helpers.RemoteError, + self.loop.run_until_complete, + helpers.fetch_api_description(fixture.url), + ) + self.assertEqual("Expected application/json, got: text/json", str(error)) class TestFetchAPIDescriptionURLs(TestCase): @@ -72,7 +55,8 @@ def test__accepts_prepared_url(self): fixture = self.useFixture(testing.DescriptionServer(description_json)) description_url = self.prepare(fixture.url) # Parse, perhaps. 
description_fetched = self.loop.run_until_complete( - helpers.fetch_api_description(description_url)) + helpers.fetch_api_description(description_url) + ) self.assertThat(description_fetched, Equals(description)) @@ -88,10 +72,11 @@ def test__downloads_description(self): description = self.path.read_bytes() fixture = self.useFixture(testing.DescriptionServer(description)) description_fetched = self.loop.run_until_complete( - helpers.fetch_api_description(fixture.url)) + helpers.fetch_api_description(fixture.url) + ) self.assertThat( - description_fetched, Equals( - json.loads(description.decode("utf-8")))) + description_fetched, Equals(json.loads(description.decode("utf-8"))) + ) class TestConnect(TestCase): @@ -99,16 +84,14 @@ class TestConnect(TestCase): def setUp(self): super(TestConnect, self).setUp() - self.patch( - helpers, "fetch_api_description", - AsyncCallableMock(return_value={})) + self.patch(helpers, "fetch_api_description", AsyncCallableMock(return_value={})) - def test__anonymous_when_no_apikey_provided(self): + def test__anonymous(self): # Connect without an apikey. profile = helpers.connect("http://example.org:5240/MAAS/") helpers.fetch_api_description.assert_called_once_with( - urlparse("http://example.org:5240/MAAS/api/2.0/"), - None, False) + urlparse("http://example.org:5240/MAAS/api/2.0/"), False + ) # A Profile instance was returned with no credentials. self.assertThat(profile, IsInstance(profiles.Profile)) self.assertThat(profile.credentials, Is(None)) @@ -117,29 +100,31 @@ def test__connected_when_apikey_provided(self): credentials = make_Credentials() # Connect with an apikey. 
profile = helpers.connect( - "http://example.org:5240/MAAS/", apikey=str(credentials)) + "http://example.org:5240/MAAS/", apikey=str(credentials) + ) # The description was fetched. helpers.fetch_api_description.assert_called_once_with( - urlparse("http://example.org:5240/MAAS/api/2.0/"), - credentials, False) + urlparse("http://example.org:5240/MAAS/api/2.0/"), False + ) # A Profile instance was returned with the expected credentials. self.assertThat(profile, IsInstance(profiles.Profile)) self.assertThat(profile.credentials, Equals(credentials)) def test__complains_when_username_in_URL(self): self.assertRaises( - helpers.ConnectError, helpers.connect, - "http://foo:bar@example.org:5240/MAAS/") + helpers.ConnectError, + helpers.connect, + "http://foo:bar@example.org:5240/MAAS/", + ) def test__complains_when_password_in_URL(self): self.assertRaises( - helpers.ConnectError, helpers.connect, - "http://:bar@example.org:5240/MAAS/") + helpers.ConnectError, helpers.connect, "http://:bar@example.org:5240/MAAS/" + ) def test__URL_is_normalised_to_point_at_API_endpoint(self): profile = helpers.connect("http://example.org:5240/MAAS/") - self.assertThat(profile.url, Equals( - api_url("http://example.org:5240/MAAS/"))) + self.assertThat(profile.url, Equals(api_url("http://example.org:5240/MAAS/"))) def test__profile_is_given_default_name_based_on_URL(self): domain = make_name_without_spaces("domain") @@ -147,16 +132,16 @@ def test__profile_is_given_default_name_based_on_URL(self): 
self.assertThat(profile.name, Equals(domain)) def test__API_description_is_saved_in_profile(self): - description = helpers.fetch_api_description.return_value = { - "foo": "bar"} + description = helpers.fetch_api_description.return_value = {"foo": "bar"} profile = helpers.connect("http://example.org:5240/MAAS/") self.assertThat(profile.description, Equals(description)) def test__API_description_is_fetched_insecurely_if_requested(self): - helpers.connect("http://example.org:5240/MAAS/", insecure=True) + profile = helpers.connect("http://example.org:5240/MAAS/", insecure=True) helpers.fetch_api_description.assert_called_once_with( - urlparse("http://example.org:5240/MAAS/api/2.0/"), - None, True) + urlparse("http://example.org:5240/MAAS/api/2.0/"), True + ) + self.assertTrue(profile.other["insecure"]) class TestLogin(TestCase): @@ -164,25 +149,37 @@ class TestLogin(TestCase): def setUp(self): super(TestLogin, self).setUp() - self.patch( - helpers, "authenticate", - AsyncCallableMock(return_value=None)) - self.patch( - helpers, "fetch_api_description", - AsyncCallableMock(return_value={})) + self.patch(helpers, "authenticate", AsyncCallableMock(return_value=None)) + self.patch(helpers, "fetch_api_description", AsyncCallableMock(return_value={})) - def test__anonymous_when_neither_username_nor_password_provided(self): - # Log-in without a user-name or a password. - profile = helpers.login("http://example.org:5240/MAAS/") + def test__anonymous(self): + # Log-in anonymously. 
+ profile = helpers.login("http://example.org:5240/MAAS/", anonymous=True) # No token was obtained, but the description was fetched. helpers.authenticate.assert_not_called() - helpers.fetch_api_description.assert_called_once_with( - urlparse("http://example.org:5240/MAAS/api/2.0/"), - None, False) # A Profile instance was returned with no credentials. self.assertThat(profile, IsInstance(profiles.Profile)) self.assertThat(profile.credentials, Is(None)) + def test__macaroon_auth_with_no_username_and_password(self): + credentials = make_Credentials() + self.patch( + helpers, + "authenticate_with_macaroon", + AsyncCallableMock(return_value=credentials), + ) + # Log-in without a user-name or a password. + profile = helpers.login("http://example.org:5240/MAAS/") + # A token is obtained via macaroons, but the description was fetched. + # The description was fetched. + helpers.fetch_api_description.assert_called_once_with( + urlparse("http://example.org:5240/MAAS/api/2.0/"), False + ) + # The returned profile uses credentials obtained from the + # authentication + self.assertThat(profile, IsInstance(profiles.Profile)) + self.assertThat(profile.credentials, Is(credentials)) + def test__authenticated_when_username_and_password_provided(self): credentials = make_Credentials() helpers.authenticate.return_value = credentials @@ -190,87 +187,105 @@ def test__authenticated_when_username_and_password_provided(self): profile = helpers.login("http://foo:bar@example.org:5240/MAAS/") # A token was obtained, and the description was fetched. 
helpers.authenticate.assert_called_once_with( - "http://example.org:5240/MAAS/api/2.0/", - "foo", "bar", insecure=False) - helpers.fetch_api_description.assert_called_once_with( - urlparse("http://example.org:5240/MAAS/api/2.0/"), - credentials, False) + "http://example.org:5240/MAAS/api/2.0/", "foo", "bar", insecure=False + ) # A Profile instance was returned with the expected credentials. self.assertThat(profile, IsInstance(profiles.Profile)) self.assertThat(profile.credentials, Is(credentials)) def test__complains_when_username_but_not_password(self): self.assertRaises( - helpers.UsernameWithoutPassword, helpers.login, - "http://example.org:5240/MAAS/", username="alice") + helpers.UsernameWithoutPassword, + helpers.login, + "http://example.org:5240/MAAS/", + username="alice", + ) def test__complains_when_password_but_not_username(self): self.assertRaises( - helpers.PasswordWithoutUsername, helpers.login, - "http://example.org:5240/MAAS/", password="wonderland") + helpers.PasswordWithoutUsername, + helpers.login, + "http://example.org:5240/MAAS/", + password="wonderland", + ) def test__complains_when_username_in_URL_and_passed_explicitly(self): self.assertRaises( - helpers.LoginError, helpers.login, - "http://foo:bar@example.org:5240/MAAS/", username="alice") + helpers.LoginError, + helpers.login, + "http://foo:bar@example.org:5240/MAAS/", + username="alice", + ) def test__complains_when_empty_username_in_URL_and_passed_explicitly(self): self.assertRaises( - helpers.LoginError, 
helpers.login, - "http://:bar@example.org:5240/MAAS/", username="alice") + helpers.LoginError, + helpers.login, + "http://:bar@example.org:5240/MAAS/", + username="alice", + ) def test__complains_when_password_in_URL_and_passed_explicitly(self): self.assertRaises( - helpers.LoginError, helpers.login, - "http://foo:bar@example.org:5240/MAAS/", password="wonderland") + helpers.LoginError, + helpers.login, + "http://foo:bar@example.org:5240/MAAS/", + password="wonderland", + ) def test__complains_when_empty_password_in_URL_and_passed_explicitly(self): self.assertRaises( - helpers.LoginError, helpers.login, - "http://foo:@example.org:5240/MAAS/", password="wonderland") + helpers.LoginError, + helpers.login, + "http://foo:@example.org:5240/MAAS/", + password="wonderland", + ) def test__URL_is_normalised_to_point_at_API_endpoint(self): - profile = helpers.login("http://example.org:5240/MAAS/") - self.assertThat(profile.url, Equals( - api_url("http://example.org:5240/MAAS/"))) + profile = helpers.login("http://example.org:5240/MAAS/", anonymous=True) + self.assertThat(profile.url, Equals(api_url("http://example.org:5240/MAAS/"))) def test__profile_is_given_default_name_based_on_URL(self): domain = make_name_without_spaces("domain") - profile = helpers.login("http://%s/MAAS/" % domain) + profile = helpers.login("http://%s/MAAS/" % domain, anonymous=True) self.assertThat(profile.name, Equals(domain)) def test__API_description_is_saved_in_profile(self): description = {make_name("key"): make_name("value")} helpers.fetch_api_description.return_value = 
description - profile = helpers.login("http://example.org:5240/MAAS/") + profile = helpers.login("http://example.org:5240/MAAS/", anonymous=True) self.assertThat(profile.description, Equals(description)) def test__API_token_is_fetched_insecurely_if_requested(self): - helpers.login("http://foo:bar@example.org:5240/MAAS/", insecure=True) + profile = helpers.login("http://foo:bar@example.org:5240/MAAS/", insecure=True) helpers.authenticate.assert_called_once_with( - "http://example.org:5240/MAAS/api/2.0/", - "foo", "bar", insecure=True) + "http://example.org:5240/MAAS/api/2.0/", "foo", "bar", insecure=True + ) + self.assertTrue(profile.other["insecure"]) def test__API_description_is_fetched_insecurely_if_requested(self): - helpers.login("http://example.org:5240/MAAS/", insecure=True) + helpers.login("http://example.org:5240/MAAS/", anonymous=True, insecure=True) helpers.fetch_api_description.assert_called_once_with( - urlparse("http://example.org:5240/MAAS/api/2.0/"), - None, True) + urlparse("http://example.org:5240/MAAS/api/2.0/"), True + ) def test__uses_username_from_URL_if_set(self): helpers.login("http://foo@maas.io/", password="bar") helpers.authenticate.assert_called_once_with( - "http://maas.io/api/2.0/", "foo", "bar", insecure=False) + "http://maas.io/api/2.0/", "foo", 
"bar", insecure=False + ) def test__uses_username_and_password_from_URL_if_set(self): helpers.login("http://foo:bar@maas.io/") helpers.authenticate.assert_called_once_with( - "http://maas.io/api/2.0/", "foo", "bar", insecure=False) + "http://maas.io/api/2.0/", "foo", "bar", insecure=False + ) def test__uses_empty_username_and_password_in_URL_if_set(self): helpers.login("http://:@maas.io/") helpers.authenticate.assert_called_once_with( - "http://maas.io/api/2.0/", "", "", insecure=False) + "http://maas.io/api/2.0/", "", "", insecure=False + ) class TestAuthenticate(TestCase): @@ -278,7 +293,8 @@ class TestAuthenticate(TestCase): scenarios = tuple( (name, dict(version=version, description=description)) - for name, version, description in api_descriptions) + for name, version, description in api_descriptions + ) async def test__obtains_credentials_from_server(self): builder = ApplicationBuilder(self.description) @@ -305,16 +321,20 @@ async def deploy(request): async with builder.serve() as baseurl: credentials_observed = await helpers.authenticate( - baseurl, username, password) + baseurl, username, password + ) + self.assertThat(credentials_observed, Equals(credentials)) self.assertThat( - credentials_observed, Equals(credentials)) - self.assertThat( - parameters, MatchesDict({ - "username": Equals(username), - "password": Equals(password), - "consumer": IsInstance(str), - })) + parameters, + MatchesDict( + { + "username": Equals(username), + "password": Equals(password), + "consumer": IsInstance(str), + } + ), + ) async def test__raises_error_when_server_does_not_support_authn(self): builder = ApplicationBuilder(self.description) @@ -343,25 +363,61 @@ async def deploy(request): await 
helpers.authenticate(baseurl, "username", "password") +class TestAuthenticateWithMacaroon(TestCase): + def setUp(self): + super().setUp() + self.mock_client_request = self.patch(Client, "request") + self.token_result = { + "consumer_key": "abc", + "token_key": "123", + "token_secret": "xyz", + } + self.mock_response = Mock() + self.mock_response.status_code = 200 + self.mock_response.json.return_value = self.token_result + self.mock_client_request.return_value = self.mock_response + + async def test__authenticate_with_bakery_creates_token(self): + credentials = await helpers.authenticate_with_macaroon("http://example.com") + self.assertEqual(credentials, "abc:123:xyz") + # a call to create an API token is made + self.mock_client_request.assert_called_once_with( + "POST", + "http://example.com/account/?op=create_authorisation_token", + verify=True, + ) + + async def test__authenticate_failed_request(self): + self.mock_response.status_code = 500 + self.mock_response.text = "error!" 
+ try: + await helpers.authenticate_with_macaroon("http://example.com") + except helpers.LoginError as e: + self.assertEqual(str(e), "Login failed: error!") + else: + self.fail("LoginError not raised") + + async def test__authenticate_macaroon_not_supported(self): + self.mock_response.status_code = 401 + try: + await helpers.authenticate_with_macaroon("http://example.com") + except helpers.MacaroonLoginNotSupported as e: + self.assertEqual(str(e), "Macaroon authentication not supported") + else: + self.fail("MacaroonLoginNotSupported not raised") + + class TestDeriveResourceName(TestCase): """Tests for `derive_resource_name`.""" def test__removes_Anon_prefix(self): - self.assertThat( - helpers.derive_resource_name("AnonFooBar"), - Equals("FooBar")) + self.assertThat(helpers.derive_resource_name("AnonFooBar"), Equals("FooBar")) def test__removes_Handler_suffix(self): - self.assertThat( - helpers.derive_resource_name("FooBarHandler"), - Equals("FooBar")) + self.assertThat(helpers.derive_resource_name("FooBarHandler"), Equals("FooBar")) def test__normalises_Maas_to_MAAS(self): - self.assertThat( - helpers.derive_resource_name("Maas"), - Equals("MAAS")) + self.assertThat(helpers.derive_resource_name("Maas"), Equals("MAAS")) def test__does_all_the_above(self): - self.assertThat( - helpers.derive_resource_name("AnonMaasHandler"), - Equals("MAAS")) + self.assertThat(helpers.derive_resource_name("AnonMaasHandler"), Equals("MAAS")) diff --git a/maas/client/enum.py b/maas/client/enum.py index 8930ea1b..3da12cfb 100644 --- a/maas/client/enum.py +++ b/maas/client/enum.py @@ -1,6 +1,4 @@ -__all__ = [ - "NodeStatus", -] +__all__ = ["NodeStatus"] import enum @@ -74,20 +72,20 @@ class NodeType(enum.IntEnum): class PowerState(enum.Enum): #: On - ON = 'on' + ON = "on" #: Off - OFF = 'off' + OFF = "off" #: Unknown - UNKNOWN = 'unknown' + UNKNOWN = "unknown" #: Error - ERROR = 
'error' + ERROR = "error" class PowerStopMode(enum.Enum): #: Perform hard stop. - HARD = 'hard' + HARD = "hard" #: Perform soft stop. - SOFT = 'soft' + SOFT = "soft" class RDNSMode(enum.IntEnum): @@ -101,30 +99,66 @@ class RDNSMode(enum.IntEnum): class IPRangeType(enum.Enum): #: Dynamic IP Range. - DYNAMIC = 'dynamic' + DYNAMIC = "dynamic" #: Reserved for exclusive use by MAAS or user. - RESERVED = 'reserved' + RESERVED = "reserved" class InterfaceType(enum.Enum): #: Physical interface. - PHYSICAL = 'physical' + PHYSICAL = "physical" #: Bonded interface. - BOND = 'bond' + BOND = "bond" #: Bridge interface. - BRIDGE = 'bridge' + BRIDGE = "bridge" #: VLAN interface. - VLAN = 'vlan' + VLAN = "vlan" #: Interface not linked to a node. - UNKNOWN = 'unknown' + UNKNOWN = "unknown" class LinkMode(enum.Enum): #: IP is auto assigned by MAAS. - AUTO = 'auto' + AUTO = "auto" #: IP is assigned by a DHCP server. - DHCP = 'dhcp' + DHCP = "dhcp" #: IP is statically assigned. - STATIC = 'static' + STATIC = "static" #: Connected to subnet with no IP address. - LINK_UP = 'link_up' + LINK_UP = "link_up" + + +class BlockDeviceType(enum.Enum): + #: Physical block device. + PHYSICAL = "physical" + #: Virtual block device. 
+ VIRTUAL = "virtual" + + +class PartitionTableType(enum.Enum): + #: Master boot record + MBR = "mbr" + #: GUID Partition Table + GPT = "gpt" + + +class RaidLevel(enum.Enum): + #: RAID level 0 + RAID_0 = "raid-0" + #: RAID level 1 + RAID_1 = "raid-1" + #: RAID level 5 + RAID_5 = "raid-5" + #: RAID level 6 + RAID_6 = "raid-6" + #: RAID level 10 + RAID_10 = "raid-10" + + +class CacheMode(enum.Enum): + #: Writeback + WRITEBACK = "writeback" + #: Writethrough + WRITETHROUGH = "writethrough" + #: Writearound + WRITEAROUND = "writearound" diff --git a/maas/client/errors.py b/maas/client/errors.py index 92edcdce..963339ff 100644 --- a/maas/client/errors.py +++ b/maas/client/errors.py @@ -1,28 +1,24 @@ """ Custom errors for libmaas """ -__all__ = [ - "MAASException", - "OperationNotAllowed" -] +__all__ = ["MAASException", "OperationNotAllowed"] class MAASException(Exception): - def __init__(self, msg, obj): super().__init__(msg) self.obj = obj class OperationNotAllowed(Exception): - """ MAAS says this operation cannot be performed. """ + """MAAS says this operation cannot be performed.""" class ObjectNotLoaded(Exception): - """ Object is not loaded. """ + """Object is not loaded.""" class CannotDelete(Exception): - """ Object cannot be deleted. 
""" + """Object cannot be deleted.""" class PowerError(MAASException): diff --git a/maas/client/facade.py b/maas/client/facade.py index ec4e61a6..0a8db25d 100644 --- a/maas/client/facade.py +++ b/maas/client/facade.py @@ -99,19 +99,37 @@ def boot_sources(origin): @facade def devices(origin): return { + "create": origin.Devices.create, "get": origin.Device.read, "list": origin.Devices.read, } @facade - def events(origin): - namespace = { - "query": origin.Events.query, + def dnsresources(origin): + return { + "get": origin.DNSResource.read, + "list": origin.DNSResources.read, + } + + @facade + def dnsresourcerecords(origin): + return { + "get": origin.DNSResourceRecord.read, + "list": origin.DNSResourceRecords.read, + } + + @facade + def domains(origin): + return { + "create": origin.Domains.create, + "get": origin.Domain.read, + "list": origin.Domains.read, } - namespace.update({ - level.name: level - for level in origin.Events.Level - }) + + @facade + def events(origin): + namespace = {"query": origin.Events.query} + namespace.update({level.name: level for level in origin.Events.Level}) return namespace @facade @@ -123,6 +141,14 @@ def fabrics(origin): "list": origin.Fabrics.read, } + @facade + def pods(origin): + return { + "create": origin.Pods.create, + "list": origin.Pods.read, + "get": origin.Pod.read, + } + @facade def static_routes(origin): return { @@ -150,9 +176,7 @@ def spaces(origin): @facade def files(origin): - return { - "list": origin.Files.read, - } + return {"list": origin.Files.read} @facade def ip_ranges(origin): @@ -162,6 +186,12 @@ def ip_ranges(origin): "list": origin.IPRanges.read, } + @facade + def ip_addresses(origin): + return { + "list": origin.IPAddresses.read, + } + @facade def maas(origin): attrs = ( @@ -170,9 +200,9 @@ def maas(origin): if not name.startswith("_") ) return { - name: attr for name, attr in attrs if - isinstance(attr, enum.EnumMeta) or - name.startswith(("get_", "set_")) + name: attr + for name, attr in attrs + if 
isinstance(attr, enum.EnumMeta) or name.startswith(("get_", "set_")) } @facade @@ -182,16 +212,12 @@ def machines(origin): "create": origin.Machines.create, "get": origin.Machine.read, "list": origin.Machines.read, - "get_power_parameters_for": - origin.Machines.get_power_parameters_for, + "get_power_parameters_for": origin.Machines.get_power_parameters_for, } @facade def rack_controllers(origin): - return { - "get": origin.RackController.read, - "list": origin.RackControllers.read, - } + return {"get": origin.RackController.read, "list": origin.RackControllers.read} @facade def region_controllers(origin): @@ -226,9 +252,7 @@ def users(origin): @facade def version(origin): - return { - "get": origin.Version.read, - } + return {"get": origin.Version.read} @facade def zones(origin): @@ -237,3 +261,11 @@ def zones(origin): "get": origin.Zone.read, "list": origin.Zones.read, } + + @facade + def resource_pools(origin): + return { + "create": origin.ResourcePools.create, + "get": origin.ResourcePool.read, + "list": origin.ResourcePools.read, + } diff --git a/maas/client/flesh/__init__.py b/maas/client/flesh/__init__.py index b2de6da1..7c1fa644 100644 --- a/maas/client/flesh/__init__.py +++ b/maas/client/flesh/__init__.py @@ -11,12 +11,10 @@ "TableCommand", ] -from abc import ( - ABCMeta, - abstractmethod, -) +from abc import ABCMeta, abstractmethod import argparse from importlib import import_module +import os import subprocess import sys import textwrap @@ -26,16 +24,9 @@ import colorclass from . import tabular -from .. import ( - bones, - utils, - viscera, -) +from .. import bones, utils, viscera from ..utils.auth import try_getpass -from ..utils.profiles import ( - Profile, - ProfileStore, -) +from ..utils.profiles import Profile, ProfileStore PROG_DESCRIPTION = """\ @@ -46,19 +37,19 @@ Common commands: - maas login Log-in to a MAAS. - maas switch Switch the active profile. - maas machines List machines. - maas deploy Allocate and deploy machine. 
- maas release Release machine. - maas fabrics List fabrics. - maas subnets List subnets. + {program} login Log-in to a MAAS. + {program} switch Switch the active profile. + {program} machines List machines. + {program} deploy Allocate and deploy machine. + {program} release Release machine. + {program} fabrics List fabrics. + {program} subnets List subnets. Example help commands: - `maas help` This help page - `maas help commands` Lists all commands - `maas help deploy` Shows help for command 'deploy' + `{program} help` This help page + `{program} help commands` Lists all commands + `{program} help deploy` Shows help for command 'deploy' """ @@ -83,9 +74,7 @@ def read_input(message, validator=None, password=False): try: validator(value) except Exception as exc: - print( - colorized("{{autored}}Error: {{/autored}} %s") % - str(exc)) + print(colorized("{{autored}}Error: {{/autored}} %s") % str(exc)) else: return value else: @@ -97,9 +86,9 @@ def yes_or_no(question): while True: value = input(question) value = value.lower() - if value in ['y', 'yes']: + if value in ["y", "yes"]: return True - elif value in ['n', 'no']: + elif value in ["n", "no"]: return False @@ -108,14 +97,16 @@ def print_with_pager(output): if sys.stdout.isatty(): try: pager = subprocess.Popen( - ['less', '-F', '-r', '-S', '-X', '-K'], - stdin=subprocess.PIPE, stdout=sys.stdout) + ["less", "-F", "-r", "-S", "-X", "-K"], + stdin=subprocess.PIPE, + stdout=sys.stdout, + ) except subprocess.CalledProcessError: # Don't use the pager since starting it has failed. print(output) return else: - pager.stdin.write(output.encode('utf-8')) + pager.stdin.write(output.encode("utf-8")) pager.stdin.close() pager.wait() else: @@ -124,7 +115,8 @@ def print_with_pager(output): def get_profile_names_and_default() -> ( - typing.Tuple[typing.Sequence[str], typing.Optional[Profile]]): + typing.Tuple[typing.Sequence[str], typing.Optional[Profile]] +): """Return the list of profile names and the default profile object. 
The list of names is sorted. @@ -140,14 +132,12 @@ def get_profile_names_and_default() -> ( class MinimalHelpAction(argparse._HelpAction): - def __call__(self, parser, namespace, values, option_string=None): parser.print_minized_help() parser.exit() class PagedHelpAction(argparse._HelpAction): - def __call__(self, parser, namespace, values, option_string=None): print_with_pager(parser.format_help()) parser.exit() @@ -161,8 +151,7 @@ class HelpFormatter(argparse.RawDescriptionHelpFormatter): def _format_usage(self, usage, actions, groups, prefix): if prefix is None: prefix = "Usage: " - return super(HelpFormatter, self)._format_usage( - usage, actions, groups, prefix) + return super(HelpFormatter, self)._format_usage(usage, actions, groups, prefix) class ArgumentParser(argparse.ArgumentParser): @@ -177,8 +166,7 @@ class ArgumentParser(argparse.ArgumentParser): """ def add_subparsers(self): - raise NotImplementedError( - "add_subparsers has been disabled") + raise NotImplementedError("add_subparsers has been disabled") @property def subparsers(self): @@ -200,7 +188,8 @@ def add_argument_group(self, title, description=None): if title not in groups: groups[title] = super().add_argument_group( - title=title, description=description) + title=title, description=description + ) return groups[title] @@ -228,9 +217,7 @@ def print_minized_help(self, *, no_pager=False): description. The `help` action is used for provide more detail. 
""" formatter = self._get_formatter() - formatter.add_usage( - self.usage, self._actions, - self._mutually_exclusive_groups) + formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) formatter.add_text(self.description) if no_pager: print(formatter.format_help()) @@ -272,17 +259,21 @@ def register(cls, parser, name=None): """ help_title, help_body = utils.parse_docstring(cls) command_parser = parser.subparsers.add_parser( - cls.name() if name is None else name, help=help_title, - description=help_title, epilog=help_body, add_help=False, - formatter_class=HelpFormatter) + cls.name() if name is None else name, + help=help_title, + description=help_title, + epilog=help_body, + add_help=False, + formatter_class=HelpFormatter, + ) command_parser.add_argument( - "-h", "--help", action=PagedHelpAction, help=argparse.SUPPRESS) + "-h", "--help", action=PagedHelpAction, help=argparse.SUPPRESS + ) command_parser.set_defaults(execute=cls(command_parser)) return command_parser class TableCommand(Command): - def __init__(self, parser): super(TableCommand, self).__init__(parser) if sys.stdout.isatty(): @@ -290,8 +281,11 @@ def __init__(self, parser): else: default_target = tabular.RenderTarget.plain parser.other.add_argument( - "--format", type=tabular.RenderTarget, - choices=tabular.RenderTarget, default=default_target, help=( + "--format", + type=tabular.RenderTarget, + choices=tabular.RenderTarget, + default=default_target, + help=( "Output tabular data as a formatted table (pretty), a " "formatted table using only ASCII for borders (plain), or " "one of several dump formats. Default: %(default)s." 
@@ -300,55 +294,56 @@ def __init__(self, parser): class OriginCommandBase(Command): - def __init__(self, parser): super(OriginCommandBase, self).__init__(parser) parser.other.add_argument( - "--profile", dest="profile_name", metavar="NAME", - choices=PROFILE_NAMES, required=(PROFILE_DEFAULT is None), + "--profile", + dest="profile_name", + metavar="NAME", + choices=PROFILE_NAMES, + required=(PROFILE_DEFAULT is None), help=( "The name of the remote MAAS instance to use. Use " - "`profiles list` to obtain a list of valid profiles." + - ("" if PROFILE_DEFAULT is None else - " [default: %s]" % PROFILE_DEFAULT.name) - )) + "`profiles list` to obtain a list of valid profiles." + + ( + "" + if PROFILE_DEFAULT is None + else " [default: %s]" % PROFILE_DEFAULT.name + ) + ), + ) if PROFILE_DEFAULT is not None: parser.set_defaults(profile=PROFILE_DEFAULT.name) class OriginCommand(OriginCommandBase): - def __call__(self, options): session = bones.SessionAPI.fromProfileName(options.profile) origin = viscera.Origin(session) return self.execute(origin, options) def execute(self, origin, options): - raise NotImplementedError( - "Implement execute() in subclasses.") + raise NotImplementedError("Implement execute() in subclasses.") class OriginTableCommand(OriginCommandBase, TableCommand): - def __call__(self, options): session = bones.SessionAPI.fromProfileName(options.profile) origin = viscera.Origin(session) return self.execute(origin, options, target=options.format) def execute(self, origin, options, *, target): - raise NotImplementedError( - "Implement execute() in subclasses.") + raise NotImplementedError("Implement execute() in subclasses.") class OriginPagedTableCommand(OriginTableCommand): - def __init__(self, parser): super(OriginPagedTableCommand, self).__init__(parser) parser.other.add_argument( - "--no-pager", action='store_true', - help=( - "Don't use the pager when printing the output of the " - "command.")) + "--no-pager", + action="store_true", + help=("Don't use 
the pager when printing the output of the " "command."), + ) def __call__(self, options): return_code = 0 @@ -362,8 +357,9 @@ def __call__(self, options): pass else: raise TypeError( - "execute must return either tuple, int or str, not %s" % ( - type(output).__name__)) + "execute must return either tuple, int or str, not %s" + % (type(output).__name__) + ) if output: if options.no_pager: print(output) @@ -379,27 +375,25 @@ def __init__(self, parser, parent_parser): self.parent_parser = parent_parser super(cmd_help, self).__init__(parser) parser.add_argument( - "-h", "--help", action=PagedHelpAction, help=argparse.SUPPRESS) - parser.add_argument( - 'command', nargs='?', help="Show help for this command.") + "-h", "--help", action=PagedHelpAction, help=argparse.SUPPRESS + ) + parser.add_argument("command", nargs="?", help="Show help for this command.") parser.other.add_argument( - "--no-pager", action='store_true', - help=( - "Don't use the pager when printing the output of the " - "command.")) + "--no-pager", + action="store_true", + help=("Don't use the pager when printing the output of the " "command."), + ) def __call__(self, options): if options.command is None: self.parent_parser.print_minized_help(no_pager=options.no_pager) else: - command = self.parent_parser.subparsers.choices.get( - options.command, None) + command = self.parent_parser.subparsers.choices.get(options.command, None) if command is None: - if options.command == 'commands': + if options.command == "commands": self.print_all_commands(no_pager=options.no_pager) else: - self.parser.error( - "unknown command %s" % options.command) + self.parser.error("unknown command %s" % options.command) else: if options.no_pager: command.print_help() @@ -419,14 +413,13 @@ def print_all_commands(self, *, no_pager=False): for name in command_names: command = self.parent_parser.subparsers.choices[name] extra_padding = max_name_len - len(name) - command_line = '%s%s%s' % ( - name, ' ' * extra_padding, 
command.description) + command_line = "%s%s%s" % (name, " " * extra_padding, command.description) while len(command_line) > formatter._width: lines = textwrap.wrap(command_line, formatter._width) commands += "%s\n" % lines[0] if len(lines) > 1: - lines[1] = (' ' * max_name_len) + lines[1] - command_line = ' '.join(lines[1:]) + lines[1] = (" " * max_name_len) + lines[1] + command_line = " ".join(lines[1:]) else: command_line = None if command_line: @@ -445,9 +438,13 @@ def register(cls, parser, name=None): """ help_title, help_body = utils.parse_docstring(cls) command_parser = parser.subparsers.add_parser( - cls.name() if name is None else name, help=help_title, - description=help_title, epilog=help_body, add_help=False, - formatter_class=HelpFormatter) + cls.name() if name is None else name, + help=help_title, + description=help_title, + epilog=help_body, + add_help=False, + formatter_class=HelpFormatter, + ) command_parser.set_defaults(execute=cls(command_parser, parser)) return command_parser @@ -455,18 +452,30 @@ def register(cls, parser, name=None): def prepare_parser(program): """Create and populate an argument parser.""" parser = ArgumentParser( - description=PROG_DESCRIPTION, prog=program, + description=PROG_DESCRIPTION.format(program=program), + prog=program, formatter_class=HelpFormatter, - add_help=False) + add_help=False, + ) parser.add_argument( - "-h", "--help", action=MinimalHelpAction, help=argparse.SUPPRESS) + "-h", "--help", action=MinimalHelpAction, help=argparse.SUPPRESS + ) # Register sub-commands. submodules = ( - "nodes", "machines", "devices", "controllers", - "fabrics", "vlans", "subnets", "spaces", - "files", "tags", "users", - "profiles", "shell", + "nodes", + "machines", + "devices", + "controllers", + "fabrics", + "vlans", + "subnets", + "spaces", + "files", + "tags", + "users", + "profiles", + "shell", ) cmd_help.register(parser) for submodule in submodules: @@ -475,8 +484,8 @@ def prepare_parser(program): # Register global options. 
parser.add_argument( - '--debug', action='store_true', default=False, - help=argparse.SUPPRESS) + "--debug", action="store_true", default=False, help=argparse.SUPPRESS + ) return parser @@ -503,8 +512,16 @@ def post_mortem(traceback): post_mortem(traceback) +def program_name_from_env(program): + """Return the program name from environment.""" + if os.environ.get("SNAP_INSTANCE_NAME"): + return os.environ.get("SNAP_INSTANCE_NAME") + return program + + def main(argv=sys.argv): program, *arguments = argv + program = program_name_from_env(program) parser, options = None, None try: diff --git a/maas/client/flesh/controllers.py b/maas/client/flesh/controllers.py index 21adfdbb..f93b799f 100644 --- a/maas/client/flesh/controllers.py +++ b/maas/client/flesh/controllers.py @@ -1,19 +1,13 @@ """Commands for controllers.""" -__all__ = [ - "register", -] +__all__ = ["register"] import asyncio from itertools import chain -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . 
import CommandError, OriginPagedTableCommand, tables from ..enum import NodeType -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous class cmd_controllers(OriginPagedTableCommand): @@ -21,8 +15,7 @@ class cmd_controllers(OriginPagedTableCommand): def __init__(self, parser): super(cmd_controllers, self).__init__(parser) - parser.add_argument("hostname", nargs='*', help=( - "Hostname of the controller.")) + parser.add_argument("hostname", nargs="*", help=("Hostname of the controller.")) @asynchronous async def execute(self, origin, options, target): @@ -32,7 +25,8 @@ async def execute(self, origin, options, target): controller_sets = await asyncio.gather( origin.RackControllers.read(hostnames=hostnames), - origin.RegionControllers.read(hostnames=hostnames)) + origin.RegionControllers.read(hostnames=hostnames), + ) controllers = { controller.system_id: controller for controller in chain.from_iterable(controller_sets) @@ -46,14 +40,12 @@ class cmd_controller(OriginPagedTableCommand): def __init__(self, parser): super(cmd_controller, self).__init__(parser) - parser.add_argument("hostname", nargs=1, help=( - "Hostname of the controller.")) + parser.add_argument("hostname", nargs=1, help=("Hostname of the controller.")) def execute(self, origin, options, target): nodes = origin.Nodes.read(hostnames=options.hostname) if len(nodes) == 0: - raise CommandError( - "Unable to find controller %s." % options.hostname[0]) + raise CommandError("Unable to find controller %s." % options.hostname[0]) node = nodes[0] if node.node_type == NodeType.RACK_CONTROLLER: table = tables.ControllerDetail() @@ -65,8 +57,7 @@ def execute(self, origin, options, target): table = tables.ControllerDetail() node = node.as_rack_controller() else: - raise CommandError( - "Unable to find controller %s." % options.hostname[0]) + raise CommandError("Unable to find controller %s." 
% options.hostname[0]) return table.render(target, node) diff --git a/maas/client/flesh/devices.py b/maas/client/flesh/devices.py index e3cf38db..92d80e5f 100644 --- a/maas/client/flesh/devices.py +++ b/maas/client/flesh/devices.py @@ -1,14 +1,8 @@ """Commands for devices.""" -__all__ = [ - "register", -] +__all__ = ["register"] -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . import CommandError, OriginPagedTableCommand, tables class cmd_devices(OriginPagedTableCommand): @@ -16,10 +10,10 @@ class cmd_devices(OriginPagedTableCommand): def __init__(self, parser): super(cmd_devices, self).__init__(parser) - parser.add_argument("hostname", nargs='*', help=( - "Hostname of the device.")) - parser.add_argument("--owned", action="store_true", help=( - "Show only machines owned by you.")) + parser.add_argument("hostname", nargs="*", help=("Hostname of the device.")) + parser.add_argument( + "--owned", action="store_true", help=("Show only machines owned by you.") + ) def execute(self, origin, options, target): hostnames = None @@ -28,12 +22,13 @@ def execute(self, origin, options, target): devices = origin.Devices.read(hostnames=hostnames) if options.owned: me = origin.Users.whoami() - devices = origin.Devices([ - device - for device in devices - if device.owner is not None and - device.owner.username == me.username - ]) + devices = origin.Devices( + [ + device + for device in devices + if device.owner is not None and device.owner.username == me.username + ] + ) table = tables.DevicesTable() return table.render(target, devices) @@ -43,14 +38,12 @@ class cmd_device(OriginPagedTableCommand): def __init__(self, parser): super(cmd_device, self).__init__(parser) - parser.add_argument("hostname", nargs=1, help=( - "Hostname of the device.")) + parser.add_argument("hostname", nargs=1, help=("Hostname of the device.")) def execute(self, origin, options, target): devices = origin.Devices.read(hostnames=options.hostname) if len(devices) == 0: - raise 
CommandError( - "Unable to find device %s." % options.hostname[0]) + raise CommandError("Unable to find device %s." % options.hostname[0]) device = devices[0] table = tables.DeviceDetail() return table.render(target, device) diff --git a/maas/client/flesh/fabrics.py b/maas/client/flesh/fabrics.py index a7dab3f3..301751ee 100644 --- a/maas/client/flesh/fabrics.py +++ b/maas/client/flesh/fabrics.py @@ -1,18 +1,12 @@ """Commands for fabrics.""" -__all__ = [ - "register", -] +__all__ = ["register"] from http import HTTPStatus -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . import CommandError, OriginPagedTableCommand, tables from ..bones import CallError -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous class cmd_fabrics(OriginPagedTableCommand): @@ -20,8 +14,9 @@ class cmd_fabrics(OriginPagedTableCommand): def __init__(self, parser): super(cmd_fabrics, self).__init__(parser) - parser.add_argument("--minimal", action="store_true", help=( - "Output only the fabric names.")) + parser.add_argument( + "--minimal", action="store_true", help=("Output only the fabric names.") + ) @asynchronous async def load_object_sets(self, origin): @@ -32,11 +27,9 @@ async def load_object_sets(self, origin): def execute(self, origin, options, target): visible_columns = None if options.minimal: - visible_columns = ('name',) + visible_columns = ("name",) fabrics, subnets = self.load_object_sets(origin) - table = tables.FabricsTable( - visible_columns=visible_columns, - subnets=subnets) + table = tables.FabricsTable(visible_columns=visible_columns, subnets=subnets) return table.render(target, fabrics) @@ -45,8 +38,7 @@ class cmd_fabric(OriginPagedTableCommand): def __init__(self, parser): super(cmd_fabric, self).__init__(parser) - parser.add_argument("name", nargs=1, help=( - "Name of the fabric.")) + parser.add_argument("name", nargs=1, help=("Name of the fabric.")) @asynchronous async def load_object_sets(self, origin): @@ 
-59,8 +51,7 @@ def execute(self, origin, options, target): fabric = origin.Fabric.read(options.name[0]) except CallError as error: if error.status == HTTPStatus.NOT_FOUND: - raise CommandError( - "Unable to find fabric %s." % options.name[0]) + raise CommandError("Unable to find fabric %s." % options.name[0]) else: raise fabrics, subnets = self.load_object_sets(origin) diff --git a/maas/client/flesh/files.py b/maas/client/flesh/files.py index ea3581a6..e7ae5765 100644 --- a/maas/client/flesh/files.py +++ b/maas/client/flesh/files.py @@ -1,13 +1,8 @@ """Commands for files.""" -__all__ = [ - "register", -] +__all__ = ["register"] -from . import ( - OriginPagedTableCommand, - tables, -) +from . import OriginPagedTableCommand, tables class cmd_files(OriginPagedTableCommand): diff --git a/maas/client/flesh/machines.py b/maas/client/flesh/machines.py index 0eec2cf6..c914fff2 100644 --- a/maas/client/flesh/machines.py +++ b/maas/client/flesh/machines.py @@ -1,8 +1,6 @@ """Commands for machines.""" -__all__ = [ - "register", -] +__all__ = ["register"] import asyncio import base64 @@ -18,12 +16,12 @@ OriginCommand, OriginPagedTableCommand, tables, - yes_or_no + yes_or_no, ) from .. 
import utils from ..bones import CallError from ..enum import NodeStatus -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous def validate_file(parser, arg): @@ -35,7 +33,7 @@ def validate_file(parser, arg): def base64_file(filepath): """Read from `filepath` and convert to base64.""" - with open(filepath, 'rb') as stream: + with open(filepath, "rb") as stream: return base64.b64encode(stream.read()) @@ -44,10 +42,10 @@ class cmd_machines(OriginPagedTableCommand): def __init__(self, parser): super(cmd_machines, self).__init__(parser) - parser.add_argument("hostname", nargs='*', help=( - "Hostname of the machine.")) - parser.add_argument("--owned", action="store_true", help=( - "Show only machines owned by you.")) + parser.add_argument("hostname", nargs="*", help=("Hostname of the machine.")) + parser.add_argument( + "--owned", action="store_true", help=("Show only machines owned by you.") + ) def execute(self, origin, options, target): hostnames = None @@ -56,12 +54,14 @@ def execute(self, origin, options, target): machines = origin.Machines.read(hostnames=hostnames) if options.owned: me = origin.Users.whoami() - machines = origin.Machines([ - machine - for machine in machines - if machine.owner is not None and - machine.owner.username == me.username - ]) + machines = origin.Machines( + [ + machine + for machine in machines + if machine.owner is not None + and machine.owner.username == me.username + ] + ) table = tables.MachinesTable() return table.render(target, machines) @@ -71,14 +71,12 @@ class cmd_machine(OriginPagedTableCommand): def __init__(self, parser): super(cmd_machine, self).__init__(parser) - parser.add_argument("hostname", nargs=1, help=( - "Hostname of the machine.")) + parser.add_argument("hostname", nargs=1, help=("Hostname of the machine.")) def execute(self, origin, options, target): machines = origin.Machines.read(hostnames=options.hostname) if len(machines) == 0: - raise CommandError( - "Unable to find machine 
%s." % options.hostname[0]) + raise CommandError("Unable to find machine %s." % options.hostname[0]) machine = machines[0] table = tables.MachineDetail() return table.render(target, machine) @@ -176,65 +174,122 @@ class cmd_allocate(OriginCommand): """ def __init__( - self, parser, with_hostname=True, with_comment=True, - with_dry_run=True): + self, parser, with_hostname=True, with_comment=True, with_dry_run=True + ): super(cmd_allocate, self).__init__(parser) if with_hostname: - parser.add_argument("hostname", nargs='?', help=( - "Hostname of the machine.")) - parser.add_argument("--arch", nargs="*", help=( - "Architecture(s) of the desired machine (e.g. 'i386/generic', " - "'amd64', 'armhf/highbank', etc.)")) - parser.add_argument("--cpus", type=int, help=( - "Minimum number of CPUs for the desired machine.")) - parser.add_argument("--disk", nargs="*", help=( - "Disk(s) the desired machine must match.")) - parser.add_argument("--fabric", nargs="*", help=( - "Fabric(s) the desired machine must be connected to.")) - parser.add_argument("--interface", nargs="*", help=( - "Interface(s) the desired machine must match.")) - parser.add_argument("--memory", type=float, help=( - "Minimum amount of memory (expressed in MB) for the desired " - "machine.")) - parser.add_argument("--pod", help=( - "Pod the desired machine must be located in.")) - parser.add_argument("--pod-type", help=( - "Pod type the desired machine must be located in.")) - parser.add_argument("--subnet", nargs="*", help=( - "Subnet(s) the desired machine must be linked to.")) - parser.add_argument("--tag", nargs="*", help=( - "Tags the desired machine must match.")) - parser.add_argument("--zone", help=( - "Zone the desired machine must be located in.")) - parser.add_argument("--not-fabric", nargs="*", help=( - "Fabric(s) the desired machine must NOT be connected to.")) - parser.add_argument("--not-subnet", nargs="*", help=( - "Subnets(s) the desired machine must NOT be linked to.")) - 
parser.add_argument("--not-tag", nargs="*", help=( - "Tags the desired machine must NOT match.")) - parser.add_argument("--not-zone", nargs="*", help=( - "Zone(s) the desired machine must NOT belong in.")) - parser.other.add_argument("--agent-name", help=( - "Agent name to attach to the acquire machine.")) + parser.add_argument( + "hostname", nargs="?", help=("Hostname of the machine.") + ) + parser.add_argument( + "--arch", + nargs="*", + help=( + "Architecture(s) of the desired machine (e.g. 'i386/generic', " + "'amd64', 'armhf/highbank', etc.)" + ), + ) + parser.add_argument( + "--cpus", type=int, help=("Minimum number of CPUs for the desired machine.") + ) + parser.add_argument( + "--disk", nargs="*", help=("Disk(s) the desired machine must match.") + ) + parser.add_argument( + "--fabric", + nargs="*", + help=("Fabric(s) the desired machine must be connected to."), + ) + parser.add_argument( + "--interface", + nargs="*", + help=("Interface(s) the desired machine must match."), + ) + parser.add_argument( + "--memory", + type=float, + help=( + "Minimum amount of memory (expressed in MB) for the desired " "machine." 
+ ), + ) + parser.add_argument( + "--pod", help=("Pod the desired machine must be located in.") + ) + parser.add_argument( + "--pod-type", help=("Pod type the desired machine must be located in.") + ) + parser.add_argument( + "--subnet", + nargs="*", + help=("Subnet(s) the desired machine must be linked to."), + ) + parser.add_argument( + "--tag", nargs="*", help=("Tags the desired machine must match.") + ) + parser.add_argument( + "--zone", help=("Zone the desired machine must be located in.") + ) + parser.add_argument( + "--not-fabric", + nargs="*", + help=("Fabric(s) the desired machine must NOT be connected to."), + ) + parser.add_argument( + "--not-subnet", + nargs="*", + help=("Subnets(s) the desired machine must NOT be linked to."), + ) + parser.add_argument( + "--not-tag", nargs="*", help=("Tags the desired machine must NOT match.") + ) + parser.add_argument( + "--not-zone", + nargs="*", + help=("Zone(s) the desired machine must NOT belong in."), + ) + parser.other.add_argument( + "--agent-name", help=("Agent name to attach to the acquire machine.") + ) if with_comment: - parser.other.add_argument("--comment", help=( - "Reason for allocating the machine.")) + parser.other.add_argument( + "--comment", help=("Reason for allocating the machine.") + ) parser.other.add_argument( - "--bridge-all", action='store_true', default=None, help=( + "--bridge-all", + action="store_true", + default=None, + help=( "Automatically create a bridge on all interfaces on the " - "allocated machine.")) + "allocated machine." + ), + ) parser.other.add_argument( - "--bridge-stp", action='store_true', default=None, help=( + "--bridge-stp", + action="store_true", + default=None, + help=( "Turn spaning tree protocol on or off for the bridges created " - "with --bridge-all.")) - parser.other.add_argument("--bridge-fd", type=int, help=( - "Set the forward delay in seconds on the bridges created with " - "--bridge-all.")) + "with --bridge-all." 
+ ), + ) + parser.other.add_argument( + "--bridge-fd", + type=int, + help=( + "Set the forward delay in seconds on the bridges created with " + "--bridge-all." + ), + ) if with_dry_run: parser.other.add_argument( - "--dry-run", action='store_true', default=None, help=( + "--dry-run", + action="store_true", + default=None, + help=( "Don't actually acquire the machine just return the " - "machine that would have been acquired.")) + "machine that would have been acquired." + ), + ) @asynchronous async def allocate(self, origin, options): @@ -242,51 +297,52 @@ async def allocate(self, origin, options): me = await origin.Users.whoami() machines = await origin.Machines.read(hostnames=[options.hostname]) if len(machines) == 0: - raise CommandError( - "Unable to find machine %s." % options.hostname) + raise CommandError("Unable to find machine %s." % options.hostname) machine = machines[0] - if (machine.status == NodeStatus.ALLOCATED and - machine.owner.username == me.username): + if ( + machine.status == NodeStatus.ALLOCATED + and machine.owner.username == me.username + ): return False, machine elif machine.status != NodeStatus.READY: - raise CommandError( - "Unable to allocate machine %s." % options.hostname) - params = utils.remove_None({ - 'hostname': options.hostname, - 'architectures': options.arch, - 'cpus': options.cpus, - 'memory': options.memory, - 'fabrics': options.fabric, - 'interfaces': options.interface, - 'pod': options.pod, - 'pod_type': options.pod_type, - 'subnets': options.subnet, - 'tags': options.tag, - 'not_fabrics': options.not_fabric, - 'not_subnets': options.not_subnet, - 'not_zones': options.not_zone, - 'agent_name': options.agent_name, - 'comment': options.comment, - 'bridge_all': options.bridge_all, - 'bridge_stp': options.bridge_stp, - 'bridge_fd': options.bridge_fd, - 'dry_run': getattr(options, 'dry_run', False), - }) + raise CommandError("Unable to allocate machine %s." 
% options.hostname) + params = utils.remove_None( + { + "hostname": options.hostname, + "architectures": options.arch, + "cpus": options.cpus, + "memory": options.memory, + "fabrics": options.fabric, + "interfaces": options.interface, + "pod": options.pod, + "pod_type": options.pod_type, + "subnets": options.subnet, + "tags": options.tag, + "not_fabrics": options.not_fabric, + "not_subnets": options.not_subnet, + "not_zones": options.not_zone, + "agent_name": options.agent_name, + "comment": options.comment, + "bridge_all": options.bridge_all, + "bridge_stp": options.bridge_stp, + "bridge_fd": options.bridge_fd, + "dry_run": getattr(options, "dry_run", False), + } + ) machine = await origin.Machines.allocate(**params) if options.hostname and machine.hostname != options.hostname: await machine.release() raise CommandError( "MAAS failed to allocate machine %s; " - "instead it allocated %s." % ( - options.hostname, machine.hostname)) + "instead it allocated %s." % (options.hostname, machine.hostname) + ) return True, machine def execute(self, origin, options): with utils.Spinner() as context: context.msg = colorized("{automagenta}Allocating{/automagenta}") _, machine = self.allocate(origin, options) - print(colorized( - "{autoblue}Allocated{/autoblue} %s") % machine.hostname) + print(colorized("{autoblue}Allocated{/autoblue} %s") % machine.hostname) class MachineWorkMixin: @@ -294,20 +350,19 @@ class MachineWorkMixin: @asynchronous async def _async_perform_action( - self, context, action, machines, params, - progress_title, success_title): - + self, context, action, machines, params, progress_title, success_title + ): def _update_msg(remaining): """Update the spinner message.""" if len(remaining) == 1: msg = remaining[0].hostname elif len(remaining) == 2: - msg = "%s and %s" % ( - remaining[0].hostname, remaining[1].hostname) + msg = "%s and %s" % (remaining[0].hostname, remaining[1].hostname) else: msg = "%s machines" % len(remaining) context.msg = colorized( - 
"{autoblue}%s{/autoblue} %s" % (progress_title, msg)) + "{autoblue}%s{/autoblue} %s" % (progress_title, msg) + ) async def _perform(machine, params, remaining): """Updates the messages as actions complete.""" @@ -316,60 +371,50 @@ async def _perform(machine, params, remaining): except Exception as exc: remaining.remove(machine) _update_msg(remaining) - context.print( - colorized("{autored}Error:{/autored} %s") % str(exc)) + context.print(colorized("{autored}Error:{/autored} %s") % str(exc)) raise else: remaining.remove(machine) _update_msg(remaining) - context.print(colorized( - "{autogreen}%s{/autogreen} %s") % ( - success_title, machine.hostname)) + context.print( + colorized("{autogreen}%s{/autogreen} %s") + % (success_title, machine.hostname) + ) _update_msg(machines) - results = await asyncio.gather(*[ - _perform(machine, params, machines) - for machine in machines - ], return_exceptions=True) - failures = [ - result - for result in results - if isinstance(result, Exception) - ] + results = await asyncio.gather( + *[_perform(machine, params, machines) for machine in machines], + return_exceptions=True + ) + failures = [result for result in results if isinstance(result, Exception)] if len(failures) > 0: return 1 return 0 - def perform_action( - self, action, machines, params, progress_title, success_title): + def perform_action(self, action, machines, params, progress_title, success_title): """Perform the action on the set of machines.""" if len(machines) == 0: return 0 with utils.Spinner() as context: return self._async_perform_action( - context, action, list(machines), params, - progress_title, success_title) + context, action, list(machines), params, progress_title, success_title + ) def get_machines(self, origin, hostnames): """Return a set of machines based on `hostnames`. Any hostname that is not found will result in an error. 
""" - hostnames = { - hostname: True - for hostname in hostnames - } + hostnames = {hostname: True for hostname in hostnames} machines = origin.Machines.read(hostnames=hostnames) machines = [ - machine - for machine in machines - if hostnames.pop(machine.hostname, False) + machine for machine in machines if hostnames.pop(machine.hostname, False) ] if len(hostnames) > 0: raise CommandError( - "Unable to find %s %s." % ( - "machines" if len(hostnames) > 1 else "machine", - ','.join(hostnames))) + "Unable to find %s %s." + % ("machines" if len(hostnames) > 1 else "machine", ",".join(hostnames)) + ) return machines @@ -379,11 +424,13 @@ class MachineSSHMixin: def add_ssh_options(self, parser): """Add the SSH arguments to the `parser`.""" parser.add_argument( - "--username", metavar='USER', help=( - "Username for the SSH connection.")) + "--username", metavar="USER", help=("Username for the SSH connection.") + ) parser.add_argument( - "--boot-only", action="store_true", help=( - "Only use the IP addresses on the machine's boot interface.")) + "--boot-only", + action="store_true", + help=("Only use the IP addresses on the machine's boot interface."), + ) def get_ip_addresses(self, machine, *, boot_only=False, discovered=False): """Return all IP address for `machine`. @@ -391,9 +438,7 @@ def get_ip_addresses(self, machine, *, boot_only=False, discovered=False): IP address from `boot_interface` come first. 
""" boot_ips = [ - link.ip_address - for link in machine.boot_interface.links - if link.ip_address + link.ip_address for link in machine.boot_interface.links if link.ip_address ] if boot_only: if boot_ips: @@ -411,8 +456,7 @@ def get_ip_addresses(self, machine, *, boot_only=False, discovered=False): link.ip_address for interface in machine.interfaces for link in interface.links - if (interface.id != machine.boot_interface.id and - link.ip_address) + if (interface.id != machine.boot_interface.id and link.ip_address) ] ips = boot_ips + other_ips if ips: @@ -426,8 +470,7 @@ def get_ip_addresses(self, machine, *, boot_only=False, discovered=False): link.ip_address for interface in machine.interfaces for link in interface.discovered - if (interface.id != machine.boot_interface.id and - link.ip_address) + if (interface.id != machine.boot_interface.id and link.ip_address) ] else: return [] @@ -439,25 +482,21 @@ async def _async_get_sshable_ips(self, ip_addresses): async def _async_ping(ip_address): try: reader, writer = await asyncio.wait_for( - asyncio.open_connection(ip_address, 22), timeout=5) + asyncio.open_connection(ip_address, 22), timeout=5 + ) except (OSError, TimeoutError): return None try: line = await reader.readline() finally: writer.close() - if line.startswith(b'SSH-'): + if line.startswith(b"SSH-"): return ip_address - ssh_ips = await asyncio.gather(*[ - _async_ping(ip_address) - for ip_address in ip_addresses - ]) - return [ - ip_address - for ip_address in ssh_ips - if ip_address is not None - ] + ssh_ips = await asyncio.gather( + *[_async_ping(ip_address) for ip_address in ip_addresses] + ) + return [ip_address for ip_address in ssh_ips if ip_address is not None] def _check_ssh(self, *args): """Check if SSH connection can be made to IP with username.""" @@ -465,83 +504,97 @@ def _check_ssh(self, *args): args, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL) + stderr=subprocess.DEVNULL, + ) ssh.wait() return 
ssh.returncode == 0 def _determine_username(self, ip): """SSH in as root and determine the username.""" - ssh = subprocess.Popen([ - "ssh", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "StrictHostKeyChecking=no", - "root@%s" % ip], + ssh = subprocess.Popen( + [ + "ssh", + "-o", + "UserKnownHostsFile=/dev/null", + "-o", + "StrictHostKeyChecking=no", + "root@%s" % ip, + ], stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL) + stderr=subprocess.DEVNULL, + ) first_line = ssh.stdout.readline() ssh.kill() ssh.wait() if first_line: match = re.search( r"Please login as the user \"(\w+)\" rather than " - r"the user \"root\".", first_line.decode('utf-8')) + r"the user \"root\".", + first_line.decode("utf-8"), + ) if match: return match.groups()[0] else: return None def ssh( - self, machine, *, - username=None, command=None, boot_only=False, discovered=False, - wait=300): + self, + machine, + *, + username=None, + command=None, + boot_only=False, + discovered=False, + wait=300 + ): """SSH into `machine`.""" start_time = time.monotonic() with utils.Spinner() as context: context.msg = colorized( - "{autoblue}Determining{/autoblue} best IP for %s" % ( - machine.hostname)) + "{autoblue}Determining{/autoblue} best IP for %s" % (machine.hostname) + ) ip_addresses = self.get_ip_addresses( - machine, boot_only=boot_only, discovered=discovered) + machine, boot_only=boot_only, discovered=discovered + ) if len(ip_addresses) > 0: pingable_ips = self._async_get_sshable_ips(ip_addresses) - while (len(pingable_ips) == 0 and - (time.monotonic() - start_time) < wait): + while len(pingable_ips) == 0 and (time.monotonic() - start_time) < wait: time.sleep(5) pingable_ips = self._async_get_sshable_ips(ip_addresses) if len(pingable_ips) == 0: raise CommandError( - "No IP addresses on %s can be reached." % ( - machine.hostname)) + "No IP addresses on %s can be reached." 
% (machine.hostname) + ) else: ip = pingable_ips[0] else: - raise CommandError( - "%s has no IP addresses." % machine.hostname) + raise CommandError("%s has no IP addresses." % machine.hostname) if username is None: context.msg = colorized( - "{autoblue}Determining{/autoblue} SSH username on %s" % ( - machine.hostname)) + "{autoblue}Determining{/autoblue} SSH username on %s" + % (machine.hostname) + ) username = self._determine_username(ip) - while (username is None and - (time.monotonic() - start_time) < wait): + while username is None and (time.monotonic() - start_time) < wait: username = self._determine_username(ip) if username is None: - raise CommandError( - "Failed to determine the username for SSH.") + raise CommandError("Failed to determine the username for SSH.") conn_str = "%s@%s" % (username, ip) args = [ "ssh", - "-o", "UserKnownHostsFile=/dev/null", - "-o", "StrictHostKeyChecking=no", - conn_str + "-o", + "UserKnownHostsFile=/dev/null", + "-o", + "StrictHostKeyChecking=no", + conn_str, ] context.msg = colorized( - "{automagenta}Waiting{/automagenta} for SSH on %s" % ( - machine.hostname)) + "{automagenta}Waiting{/automagenta} for SSH on %s" % (machine.hostname) + ) check_args = args + ["echo"] connectable = self._check_ssh(*check_args) while not connectable and (time.monotonic() - start_time) < wait: @@ -549,13 +602,14 @@ def ssh( connectable = self._check_ssh(*check_args) if not connectable: raise CommandError( - "SSH never started on %s using IP %s." % ( - machine.hostname, ip)) + "SSH never started on %s using IP %s." 
% (machine.hostname, ip) + ) if command is not None: args.append(command) ssh = subprocess.Popen( - args, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr) + args, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr + ) ssh.wait() return ssh.returncode @@ -564,26 +618,41 @@ class MachineReleaseMixin(MachineWorkMixin): """Mixin that provide releasing machines.""" def add_release_options(self, parser): - parser.add_argument('--erase', action='store_true', help=( - "Erase the disk when releasing.")) - parser.add_argument('--secure-erase', action='store_true', help=( - "Use the drives secure erase feature if available on the disk.")) - parser.add_argument('--quick-erase', action='store_true', help=( - "Wipe the just the beginning and end of the disk. " - "This is not secure.")) + parser.add_argument( + "--erase", action="store_true", help=("Erase the disk when releasing.") + ) + parser.add_argument( + "--secure-erase", + action="store_true", + help=("Use the drives secure erase feature if available on the disk."), + ) + parser.add_argument( + "--quick-erase", + action="store_true", + help=( + "Wipe the just the beginning and end of the disk. " + "This is not secure." 
+ ), + ) def get_release_params(self, options): - return utils.remove_None({ - 'erase': options.erase, - 'secure_erase': options.secure_erase, - 'quick_erase': options.quick_erase, - }) + return utils.remove_None( + { + "erase": options.erase, + "secure_erase": options.secure_erase, + "quick_erase": options.quick_erase, + } + ) def release(self, machines, params): - wait = params.get('wait', True) + wait = params.get("wait", True) return self.perform_action( - "release", machines, params, "Releasing", - "Released" if wait else "Releasing") + "release", + machines, + params, + "Releasing", + "Released" if wait else "Releasing", + ) class cmd_deploy(cmd_allocate, MachineSSHMixin, MachineReleaseMixin): @@ -594,59 +663,83 @@ class cmd_deploy(cmd_allocate, MachineSSHMixin, MachineReleaseMixin): def __init__(self, parser): super(cmd_deploy, self).__init__( - parser, with_hostname=False, with_comment=False, - with_dry_run=False) - parser.add_argument("image", nargs='?', help=( - "Image to deploy to the machine (e.g. ubuntu/xenial or " - "just xenial).")) - parser.add_argument("hostname", nargs='?', help=( - "Hostname of the machine.")) - parser.add_argument( - "--hwe-kernel", help=( + parser, with_hostname=False, with_comment=False, with_dry_run=False + ) + parser.add_argument( + "image", + nargs="?", + help=( + "Image to deploy to the machine (e.g. ubuntu/xenial or " "just xenial)." + ), + ) + parser.add_argument("hostname", nargs="?", help=("Hostname of the machine.")) + parser.add_argument( + "--hwe-kernel", + help=( "Hardware enablement kernel to use with the image. Only used " - "when deploying Ubuntu.")) + "when deploying Ubuntu." 
+ ), + ) parser.add_argument( - "--user-data", metavar="FILE", + "--user-data", + metavar="FILE", type=lambda arg: validate_file(parser, arg), - help=( - "User data that gets run on the machine once it has " - "deployed.")) + help=("User data that gets run on the machine once it has " "deployed."), + ) parser.add_argument( - "--b64-user-data", metavar="BASE64", help=( + "--b64-user-data", + metavar="BASE64", + help=( "Base64 encoded string of the user data that gets run on the " - "machine once it has deployed.")) + "machine once it has deployed." + ), + ) parser.add_argument( - "--ssh", action="store_true", help=( - "SSH into the machine once its deployed.")) + "--ssh", + action="store_true", + help=("SSH into the machine once its deployed."), + ) self.add_ssh_options(parser) parser.add_argument( - "--release-on-exit", action="store_true", help=( + "--release-on-exit", + action="store_true", + help=( "Release the machine once the SSH connection is closed. " - "Only used with --ssh is provided.")) + "Only used with --ssh is provided." 
+ ), + ) self.add_release_options(parser) - parser.other.add_argument("--comment", help=( - "Reason for deploying the machine.")) parser.other.add_argument( - "--no-wait", action="store_true", help=( - "Don't wait for the deploy to complete.")) + "--comment", help=("Reason for deploying the machine.") + ) + parser.other.add_argument( + "--no-wait", + action="store_true", + help=("Don't wait for the deploy to complete."), + ) + parser.other.add_argument( + "--install-kvm", action="store_true", help=("Install KVM on machine") + ) def _get_deploy_options(self, options): """Return the deployment options based on command line.""" user_data = None if options.user_data and options.b64_user_data: - raise CommandError( - "Cannot provide both --user-data and --b64-user-data.") + raise CommandError("Cannot provide both --user-data and --b64-user-data.") if options.b64_user_data: user_data = options.b64_user_data if options.user_data: user_data = base64_file(options.user_data).decode("ascii") - return utils.remove_None({ - 'distro_series': options.image, - 'hwe_kernel': options.hwe_kernel, - 'user_data': user_data, - 'comment': options.comment, - 'wait': False, - }) + return utils.remove_None( + { + "distro_series": options.image, + "hwe_kernel": options.hwe_kernel, + "user_data": user_data, + "comment": options.comment, + "install_kvm": options.install_kvm, + "wait": False, + } + ) def _handle_abort(self, machine, allocated): """Handle the user aborting mid deployment.""" @@ -654,21 +747,23 @@ def _handle_abort(self, machine, allocated): if abort: with utils.Spinner() as context: if allocated: - context.msg = colorized( - "{autoblue}Releasing{/autoblue} %s") % ( - machine.hostname) + context.msg = colorized("{autoblue}Releasing{/autoblue} %s") % ( + machine.hostname + ) machine.release() - context.print(colorized( - "{autoblue}Released{/autoblue} %s") % ( - machine.hostname)) + context.print( + colorized("{autoblue}Released{/autoblue} %s") + % (machine.hostname) + ) else: - 
context.msg = colorized( - "{autoblue}Aborting{/autoblue} %s") % ( - machine.hostname) + context.msg = colorized("{autoblue}Aborting{/autoblue} %s") % ( + machine.hostname + ) machine.abort() - context.print(colorized( - "{autoblue}Aborted{/autoblue} %s") % ( - machine.hostname)) + context.print( + colorized("{autoblue}Aborted{/autoblue} %s") + % (machine.hostname) + ) def execute(self, origin, options): deploy_options = self._get_deploy_options(options) @@ -676,14 +771,15 @@ def execute(self, origin, options): try: with utils.Spinner() as context: if options.hostname: - context.msg = colorized( - "{autoblue}Allocating{/autoblue} %s") % ( - options.hostname) + context.msg = colorized("{autoblue}Allocating{/autoblue} %s") % ( + options.hostname + ) else: context.msg = colorized("{autoblue}Searching{/autoblue}") allocated, machine = self.allocate(origin, options) - context.msg = colorized( - "{autoblue}Deploying{/autoblue} %s") % machine.hostname + context.msg = ( + colorized("{autoblue}Deploying{/autoblue} %s") % machine.hostname + ) try: machine = machine.deploy(**deploy_options) except CallError: @@ -692,11 +788,18 @@ def execute(self, origin, options): raise if not options.no_wait: context.msg = colorized( - "{autoblue}Deploying{/autoblue} %s on %s") % ( - machine.distro_series, machine.hostname) + "{autoblue}Deploying{/autoblue} %s on %s" + ) % (machine.distro_series, machine.hostname) while machine.status == NodeStatus.DEPLOYING: time.sleep(15) machine.refresh() + context.msg = colorized( + "{autoblue}Deploying{/autoblue} %s on %s: %s" + ) % ( + machine.distro_series, + machine.hostname, + machine.status_message, + ) except KeyboardInterrupt: if sys.stdout.isatty() and machine is not None: self._handle_abort(machine, allocated) @@ -704,26 +807,30 @@ def execute(self, origin, options): if machine.status == NodeStatus.FAILED_DEPLOYMENT: raise CommandError( - "Deployment of %s on %s failed." 
% ( - machine.distro_series, machine.hostname)) + "Deployment of %s on %s failed." + % (machine.distro_series, machine.hostname) + ) elif machine.status == NodeStatus.DEPLOYED: - print(colorized( - "{autoblue}Deployed{/autoblue} %s on %s") % ( - machine.distro_series, machine.hostname)) + print( + colorized("{autoblue}Deployed{/autoblue} %s on %s") + % (machine.distro_series, machine.hostname) + ) elif machine.status == NodeStatus.DEPLOYING: - print(colorized( - "{autoblue}Deploying{/autoblue} %s on %s") % ( - machine.distro_series, machine.hostname)) + print( + colorized("{autoblue}Deploying{/autoblue} %s on %s") + % (machine.distro_series, machine.hostname) + ) else: raise CommandError( - "Machine %s transitioned to an unexpected state of %s." % ( - machine.hostname, machine.status_name)) + "Machine %s transitioned to an unexpected state of %s." + % (machine.hostname, machine.status_name) + ) if options.ssh: machine.refresh() code = self.ssh( - machine, username=options.username, - boot_only=options.boot_only) + machine, username=options.username, boot_only=options.boot_only + ) if code == 0 and options.release_on_exit: release_params = self.get_release_params(options) release_params["wait"] = True @@ -735,26 +842,50 @@ class cmd_commission(OriginCommand, MachineSSHMixin, MachineWorkMixin): def __init__(self, parser): super(cmd_commission, self).__init__(parser) - parser.add_argument("hostname", nargs="*", help=( - "Hostname of the machine to commission.")) - parser.add_argument("--all", action="store_true", help=( - "Commission all machines that can be commissioned.")) - parser.add_argument("--new", action="store_true", help=( - "Commission all new machines.")) - parser.add_argument("--skip-networking", action="store_true", help=( - "Skip machine network discovery, keeping the current interface " - "configuration for the machine.")) - parser.add_argument("--skip-storage", action="store_true", help=( - "Skip machine storage discovery, keeping the current storage 
" - "configuration for the machine.")) - parser.add_argument("--scripts", nargs="*", metavar="SCRIPT", help=( - "Run only the selected commissioning scripts.")) - parser.add_argument("--ssh", action="store_true", help=( - "SSH into the machine during commissioning.")) + parser.add_argument( + "hostname", nargs="*", help=("Hostname of the machine to commission.") + ) + parser.add_argument( + "--all", + action="store_true", + help=("Commission all machines that can be commissioned."), + ) + parser.add_argument( + "--new", action="store_true", help=("Commission all new machines.") + ) + parser.add_argument( + "--skip-networking", + action="store_true", + help=( + "Skip machine network discovery, keeping the current interface " + "configuration for the machine." + ), + ) + parser.add_argument( + "--skip-storage", + action="store_true", + help=( + "Skip machine storage discovery, keeping the current storage " + "configuration for the machine." + ), + ) + parser.add_argument( + "--scripts", + nargs="*", + metavar="SCRIPT", + help=("Run only the selected commissioning scripts."), + ) + parser.add_argument( + "--ssh", + action="store_true", + help=("SSH into the machine during commissioning."), + ) self.add_ssh_options(parser) parser.other.add_argument( - "--no-wait", action="store_true", help=( - "Don't wait for the commisisoning to complete.")) + "--no-wait", + action="store_true", + help=("Don't wait for the commisisoning to complete."), + ) def execute(self, origin, options): if options.hostname and options.all: @@ -763,57 +894,62 @@ def execute(self, origin, options): raise CommandError("Cannot pass both hostname and --new.") if not options.hostname and not options.all and not options.new: raise CommandError("Missing parameter hostname, --all, or --new.") - if (options.ssh and - (len(options.hostname) > 1 or options.all or options.new)): - raise CommandError( - "--ssh can only be used when commissioning one machine.") + if options.ssh and (len(options.hostname) > 1 or 
options.all or options.new): + raise CommandError("--ssh can only be used when commissioning one machine.") if options.all: machines = origin.Machines.read() machines = [ machine for machine in machines - if machine.status in [ - NodeStatus.NEW, NodeStatus.READY, - NodeStatus.FAILED_COMMISSIONING] + if machine.status + in [NodeStatus.NEW, NodeStatus.READY, NodeStatus.FAILED_COMMISSIONING] ] elif options.new: machines = origin.Machines.read() machines = [ - machine - for machine in machines - if machine.status == NodeStatus.NEW + machine for machine in machines if machine.status == NodeStatus.NEW ] else: machines = self.get_machines(origin, options.hostname) - params = utils.remove_None({ - 'enable_ssh': options.ssh, - 'skip_networking': options.skip_networking, - 'skip_storage': options.skip_storage, - 'commissioning_scripts': options.scripts, - 'wait': False if options.no_wait else True - }) + params = utils.remove_None( + { + "enable_ssh": options.ssh, + "skip_networking": options.skip_networking, + "skip_storage": options.skip_storage, + "commissioning_scripts": options.scripts, + "wait": False if options.no_wait else True, + } + ) try: rc = self.perform_action( - "commission", machines, params, + "commission", + machines, + params, "Commissioning", - "Commissioning" if options.no_wait else "Commissioned") + "Commissioning" if options.no_wait else "Commissioned", + ) except KeyboardInterrupt: if sys.stdout.isatty(): abort = yes_or_no("Abort commissioning?") if abort: return self.perform_action( - "abort", machines, {}, "Aborting", "Aborted") + "abort", machines, {}, "Aborting", "Aborted" + ) else: return 1 if rc == 0 and len(machines) > 0 and options.ssh: machine = machines[0] machine.refresh() rc = self.ssh( - machine, username=options.username, - boot_only=options.boot_only, discovered=True) + machine, + username=options.username, + boot_only=options.boot_only, + discovered=True, + ) if rc == 0: return self.perform_action( - "power_off", [machine], {}, 
"Powering off", "Powered off") + "power_off", [machine], {}, "Powering off", "Powered off" + ) return rc @@ -822,16 +958,19 @@ class cmd_release(OriginCommand, MachineReleaseMixin): def __init__(self, parser): super(cmd_release, self).__init__(parser) - parser.add_argument("hostname", nargs="*", help=( - "Hostname of the machine to release.")) - parser.add_argument('--all', action='store_true', help=( - "Release all machines owned by you.")) - parser.add_argument('--comment', help=( - "Reason for releasing the machine.")) + parser.add_argument( + "hostname", nargs="*", help=("Hostname of the machine to release.") + ) + parser.add_argument( + "--all", action="store_true", help=("Release all machines owned by you.") + ) + parser.add_argument("--comment", help=("Reason for releasing the machine.")) self.add_release_options(parser) parser.other.add_argument( - "--no-wait", action="store_true", help=( - "Don't wait for the release to complete.")) + "--no-wait", + action="store_true", + help=("Don't wait for the release to complete."), + ) def execute(self, origin, options): if options.hostname and options.all: @@ -839,17 +978,21 @@ def execute(self, origin, options): if not options.hostname and not options.all: raise CommandError("Missing parameter hostname or --all.") params = self.get_release_params(options) - params['wait'] = False if options.no_wait else True + params["wait"] = False if options.no_wait else True if options.all: me = origin.Users.whoami() machines = origin.Machines.read() machines = [ machine for machine in machines - if (machine.owner is not None and - machine.owner.username == me.username and ( - machine.status not in [ - NodeStatus.COMMISSIONING, NodeStatus.TESTING])) + if ( + machine.owner is not None + and machine.owner.username == me.username + and ( + machine.status + not in [NodeStatus.COMMISSIONING, NodeStatus.TESTING] + ) + ) ] else: machines = self.get_machines(origin, options.hostname) @@ -861,18 +1004,15 @@ class cmd_abort(OriginCommand, 
MachineWorkMixin): def __init__(self, parser): super(cmd_abort, self).__init__(parser) - parser.add_argument("hostname", nargs="+", help=( - "Hostname of the machine to abort the action.")) - parser.add_argument('--comment', help=( - "Reason for aborting the action.")) + parser.add_argument( + "hostname", nargs="+", help=("Hostname of the machine to abort the action.") + ) + parser.add_argument("--comment", help=("Reason for aborting the action.")) def execute(self, origin, options): - params = utils.remove_None({ - "comment": options.comment, - }) + params = utils.remove_None({"comment": options.comment}) machines = self.get_machines(origin, options.hostname) - return self.perform_action( - "abort", machines, params, "Aborting", "Aborted") + return self.perform_action("abort", machines, params, "Aborting", "Aborted") class cmd_mark_fixed(OriginCommand, MachineWorkMixin): @@ -880,15 +1020,16 @@ class cmd_mark_fixed(OriginCommand, MachineWorkMixin): def __init__(self, parser): super(cmd_mark_fixed, self).__init__(parser) - parser.add_argument("hostname", nargs="+", help=( - "Hostname of the machine to mark fixed.")) - parser.add_argument('--comment', help=( - "Reason for marking the machine fixed.")) + parser.add_argument( + "hostname", nargs="+", help=("Hostname of the machine to mark fixed.") + ) + parser.add_argument("--comment", help=("Reason for marking the machine fixed.")) def execute(self, origin, options): machines = self.get_machines(origin, options.hostname) return self.perform_action( - "mark_fixed", machines, {}, "Marking fixed", "Marked fixed") + "mark_fixed", machines, {}, "Marking fixed", "Marked fixed" + ) class cmd_mark_broken(OriginCommand, MachineWorkMixin): @@ -896,15 +1037,18 @@ class cmd_mark_broken(OriginCommand, MachineWorkMixin): def __init__(self, parser): super(cmd_mark_broken, self).__init__(parser) - parser.add_argument("hostname", nargs="+", help=( - "Hostname of the machine to mark broken.")) - parser.add_argument('--comment', help=( 
- "Reason for marking the machine broken.")) + parser.add_argument( + "hostname", nargs="+", help=("Hostname of the machine to mark broken.") + ) + parser.add_argument( + "--comment", help=("Reason for marking the machine broken.") + ) def execute(self, origin, options): machines = self.get_machines(origin, options.hostname) return self.perform_action( - "mark_broken", machines, {}, "Marking broken", "Marked broken") + "mark_broken", machines, {}, "Marking broken", "Marked broken" + ) class cmd_ssh(OriginCommand, MachineWorkMixin, MachineSSHMixin): @@ -912,17 +1056,25 @@ class cmd_ssh(OriginCommand, MachineWorkMixin, MachineSSHMixin): def __init__(self, parser): super(cmd_ssh, self).__init__(parser) - parser.add_argument("hostname", nargs=1, help=( - "Hostname of the machine to SSH to.")) - parser.add_argument("command", nargs="?", default=None, help=( - "Hostname of the machine to SSH to.")) + parser.add_argument( + "hostname", nargs=1, help=("Hostname of the machine to SSH to.") + ) + parser.add_argument( + "command", + nargs="?", + default=None, + help=("Hostname of the machine to SSH to."), + ) self.add_ssh_options(parser) def execute(self, origin, options): machine = self.get_machines(origin, options.hostname)[0] return self.ssh( - machine, username=options.username, - command=options.command, boot_only=options.boot_only) + machine, + username=options.username, + command=options.command, + boot_only=options.boot_only, + ) class cmd_power_on(OriginCommand, MachineWorkMixin): @@ -930,15 +1082,16 @@ class cmd_power_on(OriginCommand, MachineWorkMixin): def __init__(self, parser): super(cmd_power_on, self).__init__(parser) - parser.add_argument("hostname", nargs="+", help=( - "Hostname of the machine to power on.")) - parser.add_argument('--comment', help=( - "Reason for powering the machine on.")) + parser.add_argument( + "hostname", nargs="+", help=("Hostname of the machine to power on.") + ) + parser.add_argument("--comment", help=("Reason for powering the 
machine on.")) def execute(self, origin, options): machines = self.get_machines(origin, options.hostname) return self.perform_action( - "power_on", machines, {}, "Powering on", "Powered on") + "power_on", machines, {}, "Powering on", "Powered on" + ) class cmd_power_off(OriginCommand, MachineWorkMixin): @@ -946,15 +1099,16 @@ class cmd_power_off(OriginCommand, MachineWorkMixin): def __init__(self, parser): super(cmd_power_off, self).__init__(parser) - parser.add_argument("hostname", nargs="+", help=( - "Hostname of the machine to power off.")) - parser.add_argument('--comment', help=( - "Reason for powering the machine off.")) + parser.add_argument( + "hostname", nargs="+", help=("Hostname of the machine to power off.") + ) + parser.add_argument("--comment", help=("Reason for powering the machine off.")) def execute(self, origin, options): machines = self.get_machines(origin, options.hostname) return self.perform_action( - "power_off", machines, {}, "Powering off", "Powered off") + "power_off", machines, {}, "Powering off", "Powered off" + ) def register(parser): diff --git a/maas/client/flesh/nodes.py b/maas/client/flesh/nodes.py index 6334cded..90c7ac16 100644 --- a/maas/client/flesh/nodes.py +++ b/maas/client/flesh/nodes.py @@ -1,14 +1,8 @@ """Commands for nodes.""" -__all__ = [ - "register", -] +__all__ = ["register"] -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . 
import CommandError, OriginPagedTableCommand, tables from ..enum import NodeType @@ -17,8 +11,7 @@ class cmd_nodes(OriginPagedTableCommand): def __init__(self, parser): super(cmd_nodes, self).__init__(parser) - parser.add_argument("hostname", nargs='*', help=( - "Hostname of the node.")) + parser.add_argument("hostname", nargs="*", help=("Hostname of the node.")) def execute(self, origin, options, target): hostnames = None @@ -33,14 +26,12 @@ class cmd_node(OriginPagedTableCommand): def __init__(self, parser): super(cmd_node, self).__init__(parser) - parser.add_argument("hostname", nargs=1, help=( - "Hostname of the node.")) + parser.add_argument("hostname", nargs=1, help=("Hostname of the node.")) def execute(self, origin, options, target): nodes = origin.Nodes.read(hostnames=options.hostname) if len(nodes) == 0: - raise CommandError( - "Unable to find node %s." % options.hostname[0]) + raise CommandError("Unable to find node %s." % options.hostname[0]) node = nodes[0] if node.node_type == NodeType.MACHINE: table = tables.MachineDetail(with_type=True) diff --git a/maas/client/flesh/profiles.py b/maas/client/flesh/profiles.py index e3e753cb..195479b2 100644 --- a/maas/client/flesh/profiles.py +++ b/maas/client/flesh/profiles.py @@ -1,8 +1,6 @@ """Commands for working with local profiles.""" -__all__ = [ - "register", -] +__all__ = ["register"] import sys @@ -16,16 +14,10 @@ TableCommand, tables, ) -from .. import ( - bones, - utils, -) +from .. import bones, utils from ..bones import helpers -from ..utils import ( - auth, - profiles, -) -from ..utils.async import asynchronous +from ..utils import auth, profiles +from ..utils.maas_async import asynchronous class cmd_login(Command): @@ -39,63 +31,121 @@ class cmd_login(Command): def __init__(self, parser): super(cmd_login, self).__init__(parser) parser.add_argument( - "-p", "--profile-name", default=None, help=( + "-p", + "--profile-name", + default=None, + help=( "The name to give the profile. 
Default is the username used " - "to login.")) + "to login." + ), + ) parser.add_argument( - '--apikey', default=None, help=( + "--anonymous", + default=False, + action="store_true", + help=( + "Create an anonymous profile, no credentials are associated " "to it." + ), + ) + parser.add_argument( + "--apikey", + default=None, + help=( "The API key acquired from MAAS. This requires the profile " - "name to be provided as well.")) + "name to be provided as well." + ), + ) parser.add_argument( - '-k', '--insecure', action='store_true', help=( - "Disable SSL certificate check"), default=False) + "-k", + "--insecure", + action="store_true", + help=("Disable SSL certificate check"), + default=False, + ) parser.add_argument( - "url", nargs="?", type=utils.api_url, help=( + "url", + nargs="?", + type=utils.api_url, + help=( "The URL of the API, e.g. http://example.com/MAAS/ " "or http://example.com/MAAS/api/2.0/ if you wish to specify " "the API version. If no URL is provided then it will be " - "prompted for, interactively.")) + "prompted for, interactively." + ), + ) parser.add_argument( - "username", nargs="?", default=None, help=( + "username", + nargs="?", + default=None, + help=( "The username used to login to MAAS. If no username is " "provided and API key is not being used it will be prompted " - "for, interactively.")) + "for, interactively." + ), + ) parser.add_argument( - "password", nargs="?", default=None, help=( + "password", + nargs="?", + default=None, + help=( "The password used to login to MAAS. If no password is " "proviced and API key is not being used it will be promoed " - "for, interactively.")) + "for, interactively." 
+ ), + ) @asynchronous async def __call__(self, options): - if options.apikey and not options.profile_name: + has_auth_info = any((options.apikey, options.username, options.password)) + if options.anonymous and has_auth_info: raise ValueError( - "-p,--profile-name must be provided with --apikey") + "Can't specify username, password or--apikey with --anonymous" + ) + + if options.apikey and not options.profile_name: + raise ValueError("-p,--profile-name must be provided with --apikey") + if not options.url: url = read_input("URL", validator=utils.api_url) else: url = options.url + if not options.apikey: - if not options.username: - username = read_input("Username") - else: - username = options.username - if not options.password: + if options.anonymous: + password = None + elif options.username and not options.password: password = read_input("Password", password=True) else: password = options.password - if password == '-': + if password == "-": password = sys.stdin.readline().strip() - profile = await helpers.login( - url, username=username, password=password, - insecure=options.insecure) + try: + profile = await helpers.login( + url, + anonymous=options.anonymous, + username=options.username, + password=password, + insecure=options.insecure, + ) + except helpers.MacaroonLoginNotSupported: + # the server doesn't have external authentication enabled, + # propmt for username/password + username = read_input("Username") + password = read_input("Password", password=True) + profile = await helpers.login( + url, username=username, password=password, insecure=options.insecure + ) else: credentials = auth.obtain_credentials(options.apikey) session = await bones.SessionAPI.fromURL( - url, credentials=credentials, insecure=options.insecure) + url, credentials=credentials, insecure=options.insecure + ) profile = profiles.Profile( - options.profile_name, url, credentials=credentials, - description=session.description) + options.profile_name, + url, + credentials=credentials, + 
description=session.description, + ) if options.profile_name: profile = profile.replace(name=options.profile_name) @@ -116,7 +166,7 @@ def print_whats_next(profile): "with the profile name {{autoblue}}{profile.name}{{/autoblue}}.", "For help with the available commands, try:", " maas help", - ] + ] for message in what_next: message = message.format(profile=profile) print(colorized(message)) @@ -133,10 +183,13 @@ class cmd_logout(Command): def __init__(self, parser): super(cmd_logout, self).__init__(parser) parser.add_argument( - "profile_name", metavar="profile-name", - nargs="?", choices=PROFILE_NAMES, help=( - "The profile name you want to logout of." + - ("" if PROFILE_DEFAULT is None else " [default: %(default)s]") + "profile_name", + metavar="profile-name", + nargs="?", + choices=PROFILE_NAMES, + help=( + "The profile name you want to logout of." + + ("" if PROFILE_DEFAULT is None else " [default: %(default)s]") ), ) if PROFILE_DEFAULT is not None: @@ -157,10 +210,10 @@ class cmd_switch(Command): def __init__(self, parser): super(cmd_switch, self).__init__(parser) parser.add_argument( - "profile_name", metavar="profile-name", choices=PROFILE_NAMES, - help=( - "The profile name you want to switch to." - ), + "profile_name", + metavar="profile-name", + choices=PROFILE_NAMES, + help=("The profile name you want to switch to."), ) def __call__(self, options): @@ -175,16 +228,20 @@ class cmd_profiles(TableCommand): def __init__(self, parser): super(cmd_profiles, self).__init__(parser) parser.add_argument( - "--refresh", action='store_true', default=False, help=( + "--refresh", + action="store_true", + default=False, + help=( "Retrieves the latest version of the help information for " "all profiles. Use it to update your command-line client's " - "information after an upgrade to the MAAS server."), + "information after an upgrade to the MAAS server." 
+ ), ) parser.other.add_argument( - "--no-pager", action='store_true', - help=( - "Don't use the pager when printing the output of the " - "command.")) + "--no-pager", + action="store_true", + help=("Don't use the pager when printing the output of the " "command."), + ) def __call__(self, options): if options.refresh: diff --git a/maas/client/flesh/shell.py b/maas/client/flesh/shell.py index 42d2c8b1..b966d4af 100644 --- a/maas/client/flesh/shell.py +++ b/maas/client/flesh/shell.py @@ -1,25 +1,14 @@ """Commands for running interactive and non-interactive shells.""" -__all__ = [ - "register", -] +__all__ = ["register"] import code import sys import textwrap import tokenize -from . import ( - colorized, - Command, - PROFILE_DEFAULT, - PROFILE_NAMES, -) -from .. import ( - bones, - facade, - viscera, -) +from . import colorized, Command, PROFILE_DEFAULT, PROFILE_NAMES +from .. import bones, facade, viscera class cmd_shell(Command): @@ -31,8 +20,7 @@ class cmd_shell(Command): """ profile_name_choices = PROFILE_NAMES - profile_name_default = ( - None if PROFILE_DEFAULT is None else PROFILE_DEFAULT.name) + profile_name_default = None if PROFILE_DEFAULT is None else PROFILE_DEFAULT.name def __init__(self, parser): super(cmd_shell, self).__init__(parser) @@ -42,41 +30,64 @@ def __init__(self, parser): # message instead of something more cryptic. Note that the help # string differs too. parser.add_argument( - "--profile-name", metavar="NAME", required=False, - default=None, help=( + "--profile-name", + metavar="NAME", + required=False, + default=None, + help=( "The name of the remote MAAS instance to use. " "No profiles are currently defined; use the `profiles` " "command to create one." - )) + ), + ) else: parser.add_argument( - "--profile-name", metavar="NAME", required=False, + "--profile-name", + metavar="NAME", + required=False, choices=self.profile_name_choices, - default=self.profile_name_default, help=( - "The name of the remote MAAS instance to use." 
+ ( - "" if self.profile_name_default is None + default=self.profile_name_default, + help=( + "The name of the remote MAAS instance to use." + + ( + "" + if self.profile_name_default is None else " [default: %(default)s]" ) - )) + ), + ) parser.add_argument( - "--viscera", action="store_true", default=False, help=( + "--viscera", + action="store_true", + default=False, + help=( "Create a pre-canned viscera `Origin` for the selected " "profile. This is available as `origin` in the shell's " "namespace. You probably do not need this unless you're " "developing python-libmaas itself." - )) + ), + ) parser.add_argument( - "--bones", action="store_true", default=False, help=( + "--bones", + action="store_true", + default=False, + help=( "Create a pre-canned bones `Session` for the selected " "profile. This is available as `session` in the shell's " "namespace. You probably do not need this unless you're " "developing python-libmaas itself." - )) + ), + ) parser.add_argument( - "script", metavar="SCRIPT", nargs="?", default=None, help=( + "script", + metavar="SCRIPT", + nargs="?", + default=None, + help=( "Python script to run in the shell's namespace. An " "interactive shell is started if none is given." - )) + ), + ) def __call__(self, options): """Execute this command.""" @@ -93,19 +104,19 @@ def __call__(self, options): if options.bones: namespace["session"] = session descriptions["session"] = ( - "A pre-canned `bones` session for '%s'." - % options.profile_name) + "A pre-canned `bones` session for '%s'." % options.profile_name + ) origin = viscera.Origin(session) if options.viscera: namespace["origin"] = origin descriptions["origin"] = ( - "A pre-canned `viscera` origin for '%s'." - % options.profile_name) + "A pre-canned `viscera` origin for '%s'." % options.profile_name + ) client = facade.Client(origin) namespace["client"] = client descriptions["client"] = ( - "A pre-canned client for '%s'." - % options.profile_name) + "A pre-canned client for '%s'." 
% options.profile_name + ) if options.script is None: if sys.stdin.isatty() and sys.stdout.isatty(): @@ -137,8 +148,7 @@ def _run_interactive(namespace, descriptions): except ImportError: code.InteractiveConsole(namespace).interact(" ") else: - IPython.start_ipython( - argv=[], display_banner=False, user_ns=namespace) + IPython.start_ipython(argv=[], display_banner=False, user_ns=namespace) @staticmethod def _run_script(namespace, filename): diff --git a/maas/client/flesh/spaces.py b/maas/client/flesh/spaces.py index 0979b8f1..1d37c1c9 100644 --- a/maas/client/flesh/spaces.py +++ b/maas/client/flesh/spaces.py @@ -1,18 +1,12 @@ """Commands for spaces.""" -__all__ = [ - "register", -] +__all__ = ["register"] from http import HTTPStatus -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . import CommandError, OriginPagedTableCommand, tables from ..bones import CallError -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous class cmd_spaces(OriginPagedTableCommand): @@ -20,8 +14,9 @@ class cmd_spaces(OriginPagedTableCommand): def __init__(self, parser): super(cmd_spaces, self).__init__(parser) - parser.add_argument("--minimal", action="store_true", help=( - "Output only the space names.")) + parser.add_argument( + "--minimal", action="store_true", help=("Output only the space names.") + ) @asynchronous async def load_object_sets(self, origin): @@ -33,10 +28,11 @@ async def load_object_sets(self, origin): def execute(self, origin, options, target): visible_columns = None if options.minimal: - visible_columns = ('name',) + visible_columns = ("name",) spaces, fabrics, subnets = self.load_object_sets(origin) table = tables.SpacesTable( - visible_columns=visible_columns, fabrics=fabrics, subnets=subnets) + visible_columns=visible_columns, fabrics=fabrics, subnets=subnets + ) return table.render(target, spaces) @@ -45,8 +41,7 @@ class cmd_space(OriginPagedTableCommand): def __init__(self, parser): super(cmd_space, 
self).__init__(parser) - parser.add_argument("name", nargs=1, help=( - "Name of the space.")) + parser.add_argument("name", nargs=1, help=("Name of the space.")) @asynchronous async def load_object_sets(self, origin): @@ -59,8 +54,7 @@ def execute(self, origin, options, target): space = origin.Space.read(options.name[0]) except CallError as error: if error.status == HTTPStatus.NOT_FOUND: - raise CommandError( - "Unable to find space %s." % options.name[0]) + raise CommandError("Unable to find space %s." % options.name[0]) else: raise fabrics, subnets = self.load_object_sets(origin) diff --git a/maas/client/flesh/subnets.py b/maas/client/flesh/subnets.py index cca57dee..c8f7a094 100644 --- a/maas/client/flesh/subnets.py +++ b/maas/client/flesh/subnets.py @@ -1,18 +1,12 @@ """Commands for subnets.""" -__all__ = [ - "register", -] +__all__ = ["register"] from http import HTTPStatus -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . import CommandError, OriginPagedTableCommand, tables from ..bones import CallError -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous class cmd_subnets(OriginPagedTableCommand): @@ -20,8 +14,9 @@ class cmd_subnets(OriginPagedTableCommand): def __init__(self, parser): super(cmd_subnets, self).__init__(parser) - parser.add_argument("--minimal", action="store_true", help=( - "Output only the subnet names.")) + parser.add_argument( + "--minimal", action="store_true", help=("Output only the subnet names.") + ) @asynchronous async def load_object_sets(self, origin): @@ -32,10 +27,9 @@ async def load_object_sets(self, origin): def execute(self, origin, options, target): visible_columns = None if options.minimal: - visible_columns = ('name',) + visible_columns = ("name",) subnets, fabrics = self.load_object_sets(origin) - table = tables.SubnetsTable( - visible_columns=visible_columns, fabrics=fabrics) + table = tables.SubnetsTable(visible_columns=visible_columns, fabrics=fabrics) 
return table.render(target, subnets) @@ -44,16 +38,14 @@ class cmd_subnet(OriginPagedTableCommand): def __init__(self, parser): super(cmd_subnet, self).__init__(parser) - parser.add_argument("name", nargs=1, help=( - "Name of the subnet.")) + parser.add_argument("name", nargs=1, help=("Name of the subnet.")) def execute(self, origin, options, target): try: subnet = origin.Subnet.read(options.name[0]) except CallError as error: if error.status == HTTPStatus.NOT_FOUND: - raise CommandError( - "Unable to find subnet %s." % options.name[0]) + raise CommandError("Unable to find subnet %s." % options.name[0]) else: raise table = tables.SubnetDetail(fabrics=origin.Fabrics.read()) diff --git a/maas/client/flesh/tables.py b/maas/client/flesh/tables.py index 2bb3099c..70047590 100644 --- a/maas/client/flesh/tables.py +++ b/maas/client/flesh/tables.py @@ -1,33 +1,13 @@ """Tables for representing information from MAAS.""" -__all__ = [ - "FilesTable", - "NodesTable", - "ProfilesTable", - "TagsTable", - "UsersTable", -] - -from operator import ( - attrgetter, - itemgetter -) +__all__ = ["FilesTable", "NodesTable", "ProfilesTable", "TagsTable", "UsersTable"] + +from operator import attrgetter, itemgetter from colorclass import Color -from ..enum import ( - InterfaceType, - NodeType, - PowerState, - RDNSMode, -) -from .tabular import ( - Column, - RenderTarget, - Table, - DetailTable, - NestedTableColumn, -) +from ..enum import InterfaceType, NodeType, PowerState, RDNSMode +from .tabular import Column, RenderTarget, Table, DetailTable, NestedTableColumn class NodeTypeColumn(Column): @@ -49,11 +29,10 @@ def render(self, target, node_type): class NodeArchitectureColumn(Column): - def render(self, target, architecture): if target in (RenderTarget.pretty, RenderTarget.plain): if architecture: - if architecture.endswith('/generic'): + if architecture.endswith("/generic"): architecture = architecture[:-8] else: architecture = "-" @@ -61,7 +40,6 @@ def render(self, target, architecture): 
class NodeCPUsColumn(Column): - def render(self, target, cpus): # `cpus` is a count of CPUs. if target in (RenderTarget.pretty, RenderTarget.plain): @@ -73,7 +51,6 @@ def render(self, target, cpus): class NodeMemoryColumn(Column): - def render(self, target, memory): # `memory` is in MB. if target in (RenderTarget.pretty, RenderTarget.plain): @@ -115,8 +92,7 @@ def render(self, target, datum): if target == RenderTarget.pretty: if datum in self.colours: colour = self.colours[datum] - return Color("{%s}%s{/%s}" % ( - colour, datum, colour)) + return Color("{%s}%s{/%s}" % (colour, datum, colour)) else: return datum else: @@ -135,8 +111,7 @@ def render(self, target, data): if target == RenderTarget.pretty: if data in self.colours: colour = self.colours[data] - return Color("{%s}%s{/%s}" % ( - colour, data.value.capitalize(), colour)) + return Color("{%s}%s{/%s}" % (colour, data.value.capitalize(), colour)) else: return data.value.capitalize() elif target == RenderTarget.plain: @@ -146,7 +121,6 @@ def render(self, target, data): class NodeInterfacesColumn(Column): - def render(self, target, data): count = 0 for interface in data: @@ -156,7 +130,6 @@ def render(self, target, data): class NodeOwnerColumn(Column): - def render(self, target, data): if data is None: return super().render(target, "(none)") @@ -165,7 +138,6 @@ def render(self, target, data): class NodeImageColumn(Column): - def render(self, target, data): if data: return super().render(target, data) @@ -174,29 +146,35 @@ def render(self, target, data): class NodeZoneColumn(Column): + def render(self, target, data): + return super().render(target, data.name) + +class NodeResourcePoolColumn(Column): def render(self, target, data): return super().render(target, data.name) -class NodesTable(Table): +class NodeTagsColumn(Column): + def render(self, target, data): + if data: + return super().render(target, [tag.name for tag in data]) + else: + return "" + +class NodesTable(Table): def __init__(self): 
super().__init__( - Column("hostname", "Hostname"), - NodeTypeColumn("node_type", "Type"), + Column("hostname", "Hostname"), NodeTypeColumn("node_type", "Type") ) def get_rows(self, target, nodes): - data = ( - (node.hostname, node.node_type) - for node in nodes - ) + data = ((node.hostname, node.node_type) for node in nodes) return sorted(data, key=itemgetter(0)) class MachinesTable(Table): - def __init__(self): super().__init__( Column("hostname", "Hostname"), @@ -206,6 +184,8 @@ def __init__(self): NodeArchitectureColumn("architecture", "Arch"), NodeCPUsColumn("cpus", "#CPUs"), NodeMemoryColumn("memory", "RAM"), + NodeResourcePoolColumn("pool", "Resource pool"), + NodeZoneColumn("zone", "Zone"), ) def get_rows(self, target, machines): @@ -218,6 +198,8 @@ def get_rows(self, target, machines): machine.architecture, machine.cpus, machine.memory, + machine.pool, + machine.zone, ) for machine in machines ) @@ -225,7 +207,6 @@ def get_rows(self, target, machines): class MachineDetail(DetailTable): - def __init__(self, with_type=False): self.with_type = with_type columns = [ @@ -239,9 +220,10 @@ def __init__(self, with_type=False): NodeMemoryColumn("memory", "RAM"), NodeInterfacesColumn("interfaces", "Interfaces"), Column("ip_addresses", "IP addresses"), + NodeResourcePoolColumn("pool", "Resource pool"), NodeZoneColumn("zone", "Zone"), NodeOwnerColumn("owner", "Owner"), - Column("tags", "Tags"), + NodeTagsColumn("tags", "Tags"), ] if with_type: columns.insert(1, NodeTypeColumn("node_type", "Type")) @@ -264,6 +246,7 @@ def get_rows(self, target, machine): for link in interface.links if link.ip_address ], + machine.pool, machine.zone, machine.owner, machine.tags, @@ -274,7 +257,6 @@ def get_rows(self, target, machine): class DevicesTable(Table): - def __init__(self): super().__init__( Column("hostname", "Hostname"), @@ -292,7 +274,7 @@ def get_rows(self, target, devices): for interface in device.interfaces for link in interface.links if link.ip_address - ] + ], ) for 
device in devices ) @@ -300,7 +282,6 @@ def get_rows(self, target, devices): class DeviceDetail(DetailTable): - def __init__(self, with_type=False): self.with_type = with_type columns = [ @@ -333,7 +314,6 @@ def get_rows(self, target, device): class ControllersTable(Table): - def __init__(self): super().__init__( Column("hostname", "Hostname"), @@ -358,7 +338,6 @@ def get_rows(self, target, controllers): class ControllerDetail(DetailTable): - def __init__(self): super().__init__( Column("hostname", "Hostname"), @@ -383,7 +362,6 @@ def get_rows(self, target, controller): class TagsTable(Table): - def __init__(self): super().__init__( Column("name", "Tag name"), @@ -394,18 +372,14 @@ def __init__(self): def get_rows(self, target, tags): data = ( - (tag.name, tag.definition, tag.kernel_opts, tag.comment) - for tag in tags + (tag.name, tag.definition, tag.kernel_opts, tag.comment) for tag in tags ) return sorted(data, key=itemgetter(0)) class FilesTable(Table): - def __init__(self): - super().__init__( - Column("filename", "File name"), - ) + super().__init__(Column("filename", "File name")) def get_rows(self, target, files): data = ((f.filename,) for f in files) @@ -427,7 +401,6 @@ def render(self, target, is_admin): class UsersTable(Table): - def __init__(self): super().__init__( Column("username", "User name"), @@ -441,7 +414,6 @@ def get_rows(self, target, users): class ProfileActiveColumn(Column): - def render(self, target, is_anonymous): if target is RenderTarget.pretty: return "✓" if is_anonymous else " " @@ -452,7 +424,6 @@ def render(self, target, is_anonymous): class ProfilesTable(Table): - def __init__(self): super().__init__( Column("name", "Profile"), @@ -471,10 +442,9 @@ def get_rows(self, target, profiles): class VIDColumn(Column): - def render(self, target, data): - vlan, vlans = data, data._data['fabric'].vlans - vlans = sorted(vlans, key=attrgetter('id')) + vlan, vlans = data, data._data["fabric"].vlans + vlans = sorted(vlans, key=attrgetter("id")) if 
vlans[0] == vlan: if vlan.vid == 0: data = "untagged" @@ -486,7 +456,6 @@ def render(self, target, data): class DHCPColumn(Column): - def render(self, target, vlan): if vlan.dhcp_on: if vlan.primary_rack: @@ -506,7 +475,6 @@ def render(self, target, vlan): class SpaceNameColumn(Column): - def render(self, target, space): name = space.name if name == "undefined": @@ -515,7 +483,6 @@ def render(self, target, space): class SubnetNameColumn(Column): - def render(self, target, subnet): name = subnet.cidr if subnet.name and subnet.name != name: @@ -524,19 +491,17 @@ def render(self, target, subnet): class SubnetActiveColumn(Column): - def render(self, target, active): if active: text = "Active" else: text = "Disabled" if target == RenderTarget.pretty and active: - text = Color('{autogreen}Active{/autogreen}') + text = Color("{autogreen}Active{/autogreen}") return super().render(target, text) class SubnetRDNSModeColumn(Column): - def render(self, target, mode): if mode == RDNSMode.DISABLED: text = "Disabled" @@ -551,7 +516,6 @@ def render(self, target, mode): class SubnetsTable(Table): - def __init__(self, *, visible_columns=None, fabrics=None): self.fabrics = fabrics super().__init__( @@ -559,7 +523,7 @@ def __init__(self, *, visible_columns=None, fabrics=None): VIDColumn("vid", "VID"), Column("fabric", "Fabric"), SpaceNameColumn("space", "Space"), - visible_columns=visible_columns + visible_columns=visible_columns, ) def get_vlan(self, vlan): @@ -581,12 +545,11 @@ def get_rows(self, target, subnets): self.get_fabric(subnet.vlan.fabric).name, subnet.vlan.space, ) - for subnet in sorted(subnets, key=attrgetter('cidr')) + for subnet in sorted(subnets, key=attrgetter("cidr")) ) class SubnetDetail(DetailTable): - def __init__(self, *, fabrics=None): self.fabrics = fabrics super().__init__( @@ -631,42 +594,36 @@ def get_rows(self, target, subnet): class VlansTable(Table): - def __init__(self, *, visible_columns=None, fabrics=None, subnets=None): self.subnets = subnets 
super().__init__( VIDColumn("vid", "VID"), DHCPColumn("dhcp", "DHCP"), SpaceNameColumn("space", "Space"), - NestedTableColumn("subnets", "Subnets", SubnetsTable, None, { - 'visible_columns': ('name',), - 'fabrics': fabrics, - }), - visible_columns=visible_columns + NestedTableColumn( + "subnets", + "Subnets", + SubnetsTable, + None, + {"visible_columns": ("name",), "fabrics": fabrics}, + ), + visible_columns=visible_columns, ) def get_subnets(self, vlan): """Return the subnets for the `vlan`.""" - return vlan._origin.Subnets([ - subnet - for subnet in self.subnets - if subnet.vlan.id == vlan.id - ]) + return vlan._origin.Subnets( + [subnet for subnet in self.subnets if subnet.vlan.id == vlan.id] + ) def get_rows(self, target, vlans): return ( - ( - vlan, - vlan, - vlan.space, - self.get_subnets(vlan) - ) - for vlan in sorted(vlans, key=attrgetter('vid')) + (vlan, vlan, vlan.space, self.get_subnets(vlan)) + for vlan in sorted(vlans, key=attrgetter("vid")) ) class VlanDetail(DetailTable): - def __init__(self, *, fabrics=None, subnets=None): self.fabrics = fabrics self.subnets = subnets @@ -679,10 +636,13 @@ def __init__(self, *, fabrics=None, subnets=None): Column("primary_rack", "Primary rack"), Column("secondary_rack", "Secondary rack"), SpaceNameColumn("space", "Space"), - NestedTableColumn("subnets", "Subnets", SubnetsTable, None, { - 'visible_columns': ('name',), - 'fabrics': fabrics, - }), + NestedTableColumn( + "subnets", + "Subnets", + SubnetsTable, + None, + {"visible_columns": ("name",), "fabrics": fabrics}, + ), ) def get_fabric(self, vlan): @@ -692,11 +652,9 @@ def get_fabric(self, vlan): def get_subnets(self, vlan): """Return the subnets for the `vlan`.""" - return vlan._origin.Subnets([ - subnet - for subnet in self.subnets - if subnet.vlan.id == vlan.id - ]) + return vlan._origin.Subnets( + [subnet for subnet in self.subnets if subnet.vlan.id == vlan.id] + ) def get_rows(self, target, vlan): primary_rack = vlan.primary_rack @@ -714,61 +672,60 @@ def 
get_rows(self, target, vlan): primary_rack.hostname if primary_rack else None, secondary_rack.hostname if secondary_rack else None, vlan.space, - self.get_subnets(vlan) + self.get_subnets(vlan), ) class FabricsTable(Table): - def __init__(self, *, visible_columns=None, subnets=None): super().__init__( Column("name", "Fabric"), - NestedTableColumn( - "vlans", "VLANs", VlansTable, None, {'subnets': subnets}), - visible_columns=visible_columns + NestedTableColumn("vlans", "VLANs", VlansTable, None, {"subnets": subnets}), + visible_columns=visible_columns, ) def get_rows(self, target, fabrics): - self['vlans'].table_kwargs['fabrics'] = fabrics + self["vlans"].table_kwargs["fabrics"] = fabrics return ( - ( - fabric.name, - fabric.vlans - ) - for fabric in sorted(fabrics, key=attrgetter('id')) + (fabric.name, fabric.vlans) + for fabric in sorted(fabrics, key=attrgetter("id")) ) class FabricDetail(DetailTable): - def __init__(self, *, fabrics=None, subnets=None): super().__init__( Column("name", "Name"), NestedTableColumn( - "vlans", "VLANs", VlansTable, None, { - 'fabrics': fabrics, - 'subnets': subnets}), + "vlans", + "VLANs", + VlansTable, + None, + {"fabrics": fabrics, "subnets": subnets}, + ), ) def get_rows(self, target, fabric): - return ( - fabric.name, - fabric.vlans, - ) + return (fabric.name, fabric.vlans) class SpacesTable(Table): - def __init__(self, *, visible_columns=None, fabrics=None, subnets=None): self.fabrics = fabrics super().__init__( SpaceNameColumn("name", "Space"), NestedTableColumn( - "vlans", "VLANs", VlansTable, None, { - 'visible_columns': ('vid', 'dhcp', 'subnets'), - 'fabrics': fabrics, - 'subnets': subnets}), - visible_columns=visible_columns + "vlans", + "VLANs", + VlansTable, + None, + { + "visible_columns": ("vid", "dhcp", "subnets"), + "fabrics": fabrics, + "subnets": subnets, + }, + ), + visible_columns=visible_columns, ) def get_fabric(self, vlan): @@ -779,30 +736,32 @@ def get_fabric(self, vlan): def get_vlans(self, vlans): for vlan 
in vlans: - vlan._data['fabric'] = self.get_fabric(vlan) + vlan._data["fabric"] = self.get_fabric(vlan) return vlans def get_rows(self, target, spaces): return ( - ( - space, - self.get_vlans(space.vlans) - ) - for space in sorted(spaces, key=attrgetter('name')) + (space, self.get_vlans(space.vlans)) + for space in sorted(spaces, key=attrgetter("name")) ) class SpaceDetail(DetailTable): - def __init__(self, *, fabrics=None, subnets=None): self.fabrics = fabrics super().__init__( SpaceNameColumn("name", "Space"), NestedTableColumn( - "vlans", "VLANs", VlansTable, None, { - 'visible_columns': ('vid', 'dhcp', 'subnets'), - 'fabrics': fabrics, - 'subnets': subnets}), + "vlans", + "VLANs", + VlansTable, + None, + { + "visible_columns": ("vid", "dhcp", "subnets"), + "fabrics": fabrics, + "subnets": subnets, + }, + ), ) def get_fabric(self, vlan): @@ -813,11 +772,8 @@ def get_fabric(self, vlan): def get_vlans(self, vlans): for vlan in vlans: - vlan._data['fabric'] = self.get_fabric(vlan) + vlan._data["fabric"] = self.get_fabric(vlan) return vlans def get_rows(self, target, space): - return ( - space, - self.get_vlans(space.vlans) - ) + return (space, self.get_vlans(space.vlans)) diff --git a/maas/client/flesh/tabular.py b/maas/client/flesh/tabular.py index 6bf69c8f..a59c097d 100644 --- a/maas/client/flesh/tabular.py +++ b/maas/client/flesh/tabular.py @@ -1,16 +1,10 @@ """Helpers to assemble and render tabular data.""" -__all__ = [ - "Column", - "RenderTarget", - "Table", -] - -from abc import ( - ABCMeta, - abstractmethod, -) +__all__ = ["Column", "RenderTarget", "Table"] + +from abc import ABCMeta, abstractmethod import collections +from collections.abc import Iterable import csv import enum from io import StringIO @@ -40,19 +34,19 @@ def __str__(self): class Table(metaclass=ABCMeta): - def __init__(self, *columns, visible_columns=None): super(Table, self).__init__() self.columns = collections.OrderedDict( - (column.name, column) for column in columns) + (column.name, 
column) for column in columns + ) if visible_columns is None: - self.visible_columns = collections.OrderedDict( - self.columns.items()) + self.visible_columns = collections.OrderedDict(self.columns.items()) else: self.visible_columns = collections.OrderedDict( (column.name, column) for column in columns - if column.name in visible_columns) + if column.name in visible_columns + ) def __getitem__(self, name): return self.columns[name] @@ -78,8 +72,7 @@ def render(self, target, data): rows = self._filter_rows(rows) renderer = getattr(self, "_render_%s" % target.name, None) if renderer is None: - raise ValueError( - "Cannot render %r for %s." % (self.value, target)) + raise ValueError("Cannot render %r for %s." % (self.value, target)) else: return renderer(rows) @@ -100,15 +93,14 @@ def _compute_rows(self, target, data, duplicate=False): rows = [row] for datum, column in zip(row_data, columns): if isinstance(column, NestedTableColumn): - nested_rows = column.get_rows( - target, datum, duplicate=duplicate) + nested_rows = column.get_rows(target, datum, duplicate=duplicate) orig_row = list(row) row.extend(nested_rows[0]) for nested_row in nested_rows[1:]: new_row = list(orig_row) if not duplicate: for idx in range(len(new_row)): - new_row[idx] = '' + new_row[idx] = "" new_row.extend(nested_row) rows.append(new_row) else: @@ -118,70 +110,75 @@ def _compute_rows(self, target, data, duplicate=False): def _render_plain(self, data): rows = self._compute_rows(RenderTarget.plain, data) - rows.insert(0, [column.title for column in self._flatten_columns( - self.visible_columns)]) + rows.insert( + 0, [column.title for column in self._flatten_columns(self.visible_columns)] + ) return terminaltables.AsciiTable(rows).table def _render_pretty(self, data): rows = self._compute_rows(RenderTarget.pretty, data) - rows.insert(0, [column.title for column in self._flatten_columns( - self.visible_columns)]) + rows.insert( + 0, [column.title for column in 
self._flatten_columns(self.visible_columns)] + ) return terminaltables.SingleTable(rows).table def _render_yaml(self, data): columns = self.visible_columns.values() rows = [ - [column.render(RenderTarget.yaml, datum) - for datum, column in zip(row, columns)] + [ + column.render(RenderTarget.yaml, datum) + for datum, column in zip(row, columns) + ] for row in data ] - return yaml.safe_dump({ - "columns": [ - {"name": column.name, "title": column.title} - for column in columns - ], - "data": [ - {column.name: datum - for column, datum in zip(columns, row)} - for row in rows - ], - }, default_flow_style=False).rstrip(linesep) + return yaml.safe_dump( + { + "columns": [ + {"name": column.name, "title": column.title} for column in columns + ], + "data": [ + {column.name: datum for column, datum in zip(columns, row)} + for row in rows + ], + }, + default_flow_style=False, + ).rstrip(linesep) def _render_json(self, data): columns = self.visible_columns.values() rows = [ - [column.render(RenderTarget.json, datum) - for datum, column in zip(row, columns)] + [ + column.render(RenderTarget.json, datum) + for datum, column in zip(row, columns) + ] for row in data ] - return json.dumps({ - "columns": [ - {"name": column.name, "title": column.title} - for column in columns - ], - "data": [ - {column.name: datum - for column, datum in zip(columns, row)} - for row in rows - ], - }) + return json.dumps( + { + "columns": [ + {"name": column.name, "title": column.title} for column in columns + ], + "data": [ + {column.name: datum for column, datum in zip(columns, row)} + for row in rows + ], + } + ) def _render_csv(self, data): output = StringIO() writer = csv.writer(output) - writer.writerow([column.name for column in self._flatten_columns( - self.visible_columns)]) - writer.writerows( - self._compute_rows(RenderTarget.csv, data, duplicate=True)) + writer.writerow( + [column.name for column in self._flatten_columns(self.visible_columns)] + ) + 
writer.writerows(self._compute_rows(RenderTarget.csv, data, duplicate=True)) return output.getvalue().rstrip(linesep) def __repr__(self): - return "<%s [%s]>" % ( - self.__class__.__name__, " ".join(self.visible_columns)) + return "<%s [%s]>" % (self.__class__.__name__, " ".join(self.visible_columns)) class DetailTable(Table): - def _filter_rows(self, rows, visible_columns=None): """Filter `rows` based on the visible columns.""" if visible_columns is None: @@ -195,8 +192,7 @@ def _filter_rows(self, rows, visible_columns=None): def render(self, target, data): renderer = getattr(self, "_render_%s" % target.name, None) if renderer is None: - raise ValueError( - "Cannot render %r for %s." % (self.value, target)) + raise ValueError("Cannot render %r for %s." % (self.value, target)) else: return renderer(data) @@ -237,21 +233,17 @@ def _render_table(self, target, terminaltable, data): column.render(target, datum) for column, datum in zip(columns.values(), rows) ] - table = terminaltable([ - (column.title, datum) - for column, datum in zip(columns.values(), rows) - ]) + table = terminaltable( + [(column.title, datum) for column, datum in zip(columns.values(), rows)] + ) table.inner_heading_row_border = False - return table.table + self._render_nested_tables( - target, all_rows, table_columns) + return table.table + self._render_nested_tables(target, all_rows, table_columns) def _render_plain(self, data): - return self._render_table( - RenderTarget.plain, terminaltables.AsciiTable, data) + return self._render_table(RenderTarget.plain, terminaltables.AsciiTable, data) def _render_pretty(self, data): - return self._render_table( - RenderTarget.pretty, terminaltables.SingleTable, data) + return self._render_table(RenderTarget.pretty, terminaltables.SingleTable, data) def _render_yaml(self, data): columns = self.visible_columns.values() @@ -261,10 +253,10 @@ def _render_yaml(self, data): column.render(RenderTarget.yaml, datum) for column, datum in zip(columns, rows) ] - return 
yaml.safe_dump({ - column.name: datum - for column, datum in zip(columns, rows) - }, default_flow_style=False).rstrip(linesep) + return yaml.safe_dump( + {column.name: datum for column, datum in zip(columns, rows)}, + default_flow_style=False, + ).rstrip(linesep) def _render_json(self, data): columns = self.visible_columns.values() @@ -274,10 +266,7 @@ def _render_json(self, data): column.render(RenderTarget.json, datum) for column, datum in zip(columns, rows) ] - return json.dumps({ - column.name: datum - for column, datum in zip(columns, rows) - }) + return json.dumps({column.name: datum for column, datum in zip(columns, rows)}) def _render_csv(self, data): columns, table_columns = self._split_nested_tables() @@ -289,17 +278,15 @@ def _render_csv(self, data): ] output = StringIO() writer = csv.writer(output) - writer.writerows([ - (column.name, datum) - for column, datum in zip(columns.values(), rows) - ]) + writer.writerows( + [(column.name, datum) for column, datum in zip(columns.values(), rows)] + ) return output.getvalue().rstrip(linesep) + ( - self._render_nested_tables( - RenderTarget.csv, all_rows, table_columns)) + self._render_nested_tables(RenderTarget.csv, all_rows, table_columns) + ) class Column: - def __init__(self, name, title=None): super(Column, self).__init__() self.name = name @@ -311,8 +298,7 @@ def render(self, target, datum): elif target is RenderTarget.json: return datum elif target is RenderTarget.csv: - if (isinstance(datum, collections.Iterable) and - not isinstance(datum, (str, bytes))): + if isinstance(datum, Iterable) and not isinstance(datum, (str, bytes)): return ",".join(datum) else: return datum @@ -321,8 +307,7 @@ def render(self, target, datum): return "" elif isinstance(datum, colorclass.Color): return datum.value_no_colors - elif (isinstance(datum, collections.Iterable) and - not isinstance(datum, (str, bytes))): + elif isinstance(datum, Iterable) and not isinstance(datum, (str, bytes)): return "\n".join(datum) else: return 
str(datum) @@ -331,25 +316,25 @@ def render(self, target, datum): return "" elif isinstance(datum, colorclass.Color): return datum - elif (isinstance(datum, collections.Iterable) and - not isinstance(datum, (str, bytes))): + elif isinstance(datum, Iterable) and not isinstance(datum, (str, bytes)): return "\n".join(datum) else: return str(datum) else: - raise ValueError( - "Cannot render %r for %s" % (datum, target)) + raise ValueError("Cannot render %r for %s" % (datum, target)) def __repr__(self): return "<%s name=%s title=%r>" % ( - self.__class__.__name__, self.name, self.title) + self.__class__.__name__, + self.name, + self.title, + ) class NestedTableColumn(Column): - def __init__( - self, name, title=None, - table=None, table_args=None, table_kwargs=None): + self, name, title=None, table=None, table_args=None, table_kwargs=None + ): super(NestedTableColumn, self).__init__(name, title=title) self.table = table self.table_args = table_args @@ -373,15 +358,11 @@ def get_rows(self, target, data, duplicate=False): table = self.get_table() rows = table.get_rows(target, data) rows = table._filter_rows(rows) - rows = table._compute_rows( - target, rows, duplicate=duplicate) + rows = table._compute_rows(target, rows, duplicate=duplicate) if len(rows) == 0: # Nested table column must always return one row even if its # an empty row. - rows = [[ - ' ' - for _ in range(len(table.visible_columns)) - ]] + rows = [[" " for _ in range(len(table.visible_columns))]] return rows def render(self, target, datum): @@ -393,4 +374,5 @@ def render(self, target, datum): else: raise ValueError( "Should not be called on a nested table column, the " - "table render should handle this correctly.") + "table render should handle this correctly." 
+ ) diff --git a/maas/client/flesh/tags.py b/maas/client/flesh/tags.py index 11e34a8c..e2498c2a 100644 --- a/maas/client/flesh/tags.py +++ b/maas/client/flesh/tags.py @@ -1,13 +1,8 @@ """Commands for tags.""" -__all__ = [ - "register", -] +__all__ = ["register"] -from . import ( - OriginPagedTableCommand, - tables, -) +from . import OriginPagedTableCommand, tables class cmd_tags(OriginPagedTableCommand): diff --git a/maas/client/flesh/tests/test_controllers.py b/maas/client/flesh/tests/test_controllers.py index b3a64a71..e5e7e411 100644 --- a/maas/client/flesh/tests/test_controllers.py +++ b/maas/client/flesh/tests/test_controllers.py @@ -4,11 +4,7 @@ import yaml from .testing import TestCaseWithProfile -from .. import ( - ArgumentParser, - controllers, - tabular -) +from .. import ArgumentParser, controllers, tabular from ...enum import NodeType from ...testing import make_name_without_spaces from ...viscera.testing import bind @@ -16,15 +12,13 @@ RackController, RackControllers, RegionController, - RegionControllers + RegionControllers, ) def make_origin(): """Make origin for controllers.""" - return bind( - RackControllers, RackController, - RegionController, RegionControllers) + return bind(RackControllers, RackController, RegionController, RegionControllers) class TestControllers(TestCaseWithProfile): @@ -37,38 +31,38 @@ def test_returns_table_with_controllers(self): region_rack_hostname = make_name_without_spaces() racks = [ { - 'system_id': region_rack_id, - 'hostname': region_rack_hostname, - 'node_type': NodeType.REGION_AND_RACK_CONTROLLER.value, - 'architecture': 'amd64/generic', - 'cpu_count': 2, - 'memory': 1024, + "system_id": region_rack_id, + "hostname": region_rack_hostname, + "node_type": NodeType.REGION_AND_RACK_CONTROLLER.value, + "architecture": "amd64/generic", + "cpu_count": 2, + "memory": 1024, }, { - 'system_id': make_name_without_spaces(), - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.RACK_CONTROLLER.value, - 
'architecture': 'amd64/generic', - 'cpu_count': 2, - 'memory': 1024, + "system_id": make_name_without_spaces(), + "hostname": make_name_without_spaces(), + "node_type": NodeType.RACK_CONTROLLER.value, + "architecture": "amd64/generic", + "cpu_count": 2, + "memory": 1024, }, ] regions = [ { - 'system_id': region_rack_id, - 'hostname': region_rack_hostname, - 'node_type': NodeType.REGION_AND_RACK_CONTROLLER.value, - 'architecture': 'amd64/generic', - 'cpu_count': 2, - 'memory': 1024, + "system_id": region_rack_id, + "hostname": region_rack_hostname, + "node_type": NodeType.REGION_AND_RACK_CONTROLLER.value, + "architecture": "amd64/generic", + "cpu_count": 2, + "memory": 1024, }, { - 'system_id': make_name_without_spaces(), - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.REGION_CONTROLLER.value, - 'architecture': 'amd64/generic', - 'cpu_count': 2, - 'memory': 1024, + "system_id": make_name_without_spaces(), + "hostname": make_name_without_spaces(), + "node_type": NodeType.REGION_CONTROLLER.value, + "architecture": "amd64/generic", + "cpu_count": 2, + "memory": 1024, }, ] origin.RackControllers._handler.read.return_value = racks @@ -76,28 +70,33 @@ def test_returns_table_with_controllers(self): cmd = controllers.cmd_controllers(parser) subparser = controllers.cmd_controllers.register(parser) options = subparser.parse_args([]) - output = yaml.load( - cmd.execute(origin, options, target=tabular.RenderTarget.yaml)) - self.assertEquals([ - {'name': 'hostname', 'title': 'Hostname'}, - {'name': 'node_type', 'title': 'Type'}, - {'name': 'architecture', 'title': 'Arch'}, - {'name': 'cpus', 'title': '#CPUs'}, - {'name': 'memory', 'title': 'RAM'}, - ], output['columns']) + output = yaml.safe_load( + cmd.execute(origin, options, target=tabular.RenderTarget.yaml) + ) + self.assertEquals( + [ + {"name": "hostname", "title": "Hostname"}, + {"name": "node_type", "title": "Type"}, + {"name": "architecture", "title": "Arch"}, + {"name": "cpus", "title": "#CPUs"}, + 
{"name": "memory", "title": "RAM"}, + ], + output["columns"], + ) controller_output = { - controller['hostname']: { - 'hostname': controller['hostname'], - 'node_type': controller['node_type'], - 'architecture': controller['architecture'], - 'cpus': controller['cpu_count'], - 'memory': controller['memory'], + controller["hostname"]: { + "hostname": controller["hostname"], + "node_type": controller["node_type"], + "architecture": controller["architecture"], + "cpus": controller["cpu_count"], + "memory": controller["memory"], } for controller in racks + regions } self.assertEquals( - sorted(controller_output.values(), key=itemgetter('hostname')), - output['data']) + sorted(controller_output.values(), key=itemgetter("hostname")), + output["data"], + ) def test_calls_handler_with_hostnames(self): origin = make_origin() @@ -106,13 +105,10 @@ def test_calls_handler_with_hostnames(self): origin.RegionControllers._handler.read.return_value = [] subparser = controllers.cmd_controllers.register(parser) cmd = controllers.cmd_controllers(parser) - hostnames = [ - make_name_without_spaces() - for _ in range(3) - ] + hostnames = [make_name_without_spaces() for _ in range(3)] options = subparser.parse_args(hostnames) cmd.execute(origin, options, target=tabular.RenderTarget.yaml) - origin.RackControllers._handler.read.assert_called_once_with( - hostname=hostnames) + origin.RackControllers._handler.read.assert_called_once_with(hostname=hostnames) origin.RegionControllers._handler.read.assert_called_once_with( - hostname=hostnames) + hostname=hostnames + ) diff --git a/maas/client/flesh/tests/test_devices.py b/maas/client/flesh/tests/test_devices.py index 6f7d0997..2b355725 100644 --- a/maas/client/flesh/tests/test_devices.py +++ b/maas/client/flesh/tests/test_devices.py @@ -4,32 +4,19 @@ import yaml from .testing import TestCaseWithProfile -from .. import ( - ArgumentParser, - devices, - tabular -) +from .. 
import ArgumentParser, devices, tabular from ...testing import make_name_without_spaces from ...viscera.testing import bind -from ...viscera.devices import ( - Device, - Devices -) -from ...viscera.interfaces import ( - Interface, - Interfaces, - InterfaceLink, - InterfaceLinks, -) +from ...viscera.devices import Device, Devices +from ...viscera.interfaces import Interface, Interfaces, InterfaceLink, InterfaceLinks from ...viscera.users import User def make_origin(): """Make origin for devices.""" return bind( - Devices, Device, User, - Interfaces, Interface, - InterfaceLinks, InterfaceLink) + Devices, Device, User, Interfaces, Interface, InterfaceLinks, InterfaceLink + ) class TestDevices(TestCaseWithProfile): @@ -40,45 +27,21 @@ def test_returns_table_with_devices(self): parser = ArgumentParser() devices_objs = [ { - 'hostname': make_name_without_spaces(), - 'owner': make_name_without_spaces(), - 'interface_set': [ - { - 'links': [ - {'ip_address': '192.168.122.1'} - ], - }, - { - 'links': [ - {'ip_address': '192.168.122.2'} - ], - }, - { - 'links': [ - {} - ], - }, + "hostname": make_name_without_spaces(), + "owner": make_name_without_spaces(), + "interface_set": [ + {"links": [{"ip_address": "192.168.122.1"}]}, + {"links": [{"ip_address": "192.168.122.2"}]}, + {"links": [{}]}, ], }, { - 'hostname': make_name_without_spaces(), - 'owner': make_name_without_spaces(), - 'interface_set': [ - { - 'links': [ - {'ip_address': '192.168.122.10'} - ], - }, - { - 'links': [ - {'ip_address': '192.168.122.11'} - ], - }, - { - 'links': [ - {} - ], - }, + "hostname": make_name_without_spaces(), + "owner": make_name_without_spaces(), + "interface_set": [ + {"links": [{"ip_address": "192.168.122.10"}]}, + {"links": [{"ip_address": "192.168.122.11"}]}, + {"links": [{}]}, ], }, ] @@ -86,27 +49,34 @@ def test_returns_table_with_devices(self): cmd = devices.cmd_devices(parser) subparser = devices.cmd_devices.register(parser) options = subparser.parse_args([]) - output = yaml.load( - 
cmd.execute(origin, options, target=tabular.RenderTarget.yaml)) - self.assertEquals([ - {'name': 'hostname', 'title': 'Hostname'}, - {'name': 'owner', 'title': 'Owner'}, - {'name': 'ip_addresses', 'title': 'IP addresses'}, - ], output['columns']) - devices_output = sorted([ - { - 'hostname': device['hostname'], - 'owner': device['owner'] if device['owner'] else '(none)', - 'ip_addresses': [ - link['ip_address'] - for nic in device['interface_set'] - for link in nic['links'] - if link.get('ip_address') - ] - } - for device in devices_objs - ], key=itemgetter('hostname')) - self.assertEquals(devices_output, output['data']) + output = yaml.safe_load( + cmd.execute(origin, options, target=tabular.RenderTarget.yaml) + ) + self.assertEquals( + [ + {"name": "hostname", "title": "Hostname"}, + {"name": "owner", "title": "Owner"}, + {"name": "ip_addresses", "title": "IP addresses"}, + ], + output["columns"], + ) + devices_output = sorted( + [ + { + "hostname": device["hostname"], + "owner": device["owner"] if device["owner"] else "(none)", + "ip_addresses": [ + link["ip_address"] + for nic in device["interface_set"] + for link in nic["links"] + if link.get("ip_address") + ], + } + for device in devices_objs + ], + key=itemgetter("hostname"), + ) + self.assertEquals(devices_output, output["data"]) def test_calls_handler_with_hostnames(self): origin = make_origin() @@ -114,11 +84,7 @@ def test_calls_handler_with_hostnames(self): origin.Devices._handler.read.return_value = [] subparser = devices.cmd_devices.register(parser) cmd = devices.cmd_devices(parser) - hostnames = [ - make_name_without_spaces() - for _ in range(3) - ] + hostnames = [make_name_without_spaces() for _ in range(3)] options = subparser.parse_args(hostnames) cmd.execute(origin, options, target=tabular.RenderTarget.yaml) - origin.Devices._handler.read.assert_called_once_with( - hostname=hostnames) + origin.Devices._handler.read.assert_called_once_with(hostname=hostnames) diff --git 
a/maas/client/flesh/tests/test_machines.py b/maas/client/flesh/tests/test_machines.py index 93ab286e..6e3c9412 100644 --- a/maas/client/flesh/tests/test_machines.py +++ b/maas/client/flesh/tests/test_machines.py @@ -1,30 +1,24 @@ """Tests for `maas.client.flesh.machines`.""" +from functools import partial from operator import itemgetter import yaml from .testing import TestCaseWithProfile -from .. import ( - ArgumentParser, - machines, - tabular -) -from ...enum import ( - NodeStatus, - PowerState -) +from .. import ArgumentParser, machines, tabular +from ...enum import NodeStatus, PowerState from ...testing import make_name_without_spaces from ...viscera.testing import bind -from ...viscera.machines import ( - Machine, - Machines -) +from ...viscera.machines import Machine, Machines +from ...viscera.resource_pools import ResourcePool +from ...viscera.tags import Tag, Tags from ...viscera.users import User +from ...viscera.zones import Zone def make_origin(): """Make origin for machines.""" - return bind(Machines, Machine, User) + return bind(Machines, Machine, User, ResourcePool, Zone, Tag, Tags) class TestMachines(TestCaseWithProfile): @@ -35,54 +29,69 @@ def test_returns_table_with_machines(self): parser = ArgumentParser() machine_objs = [ { - 'hostname': make_name_without_spaces(), - 'architecture': 'amd64/generic', - 'status': NodeStatus.READY.value, - 'status_name': NodeStatus.READY.name, - 'owner': None, - 'power_state': PowerState.OFF.value, - 'cpu_count': 2, - 'memory': 1024, + "hostname": make_name_without_spaces(), + "architecture": "amd64/generic", + "status": NodeStatus.READY.value, + "status_name": NodeStatus.READY.name, + "owner": None, + "power_state": PowerState.OFF.value, + "cpu_count": 2, + "memory": 1024, + "pool": {"id": 1, "name": "pool1", "description": "pool1"}, + "zone": {"id": 1, "name": "zone1", "description": "zone1"}, }, { - 'hostname': make_name_without_spaces(), - 'architecture': 'i386/generic', - 'status': NodeStatus.DEPLOYED.value, 
- 'status_name': NodeStatus.DEPLOYED.name, - 'owner': make_name_without_spaces(), - 'power_state': PowerState.ON.value, - 'cpu_count': 4, - 'memory': 4096, + "hostname": make_name_without_spaces(), + "architecture": "i386/generic", + "status": NodeStatus.DEPLOYED.value, + "status_name": NodeStatus.DEPLOYED.name, + "owner": make_name_without_spaces(), + "power_state": PowerState.ON.value, + "cpu_count": 4, + "memory": 4096, + "pool": {"id": 2, "name": "pool2", "description": "pool2"}, + "zone": {"id": 2, "name": "zone2", "description": "zone2"}, }, ] origin.Machines._handler.read.return_value = machine_objs cmd = machines.cmd_machines(parser) subparser = machines.cmd_machines.register(parser) options = subparser.parse_args([]) - output = yaml.load( - cmd.execute(origin, options, target=tabular.RenderTarget.yaml)) - self.assertEquals([ - {'name': 'hostname', 'title': 'Hostname'}, - {'name': 'power', 'title': 'Power'}, - {'name': 'status', 'title': 'Status'}, - {'name': 'owner', 'title': 'Owner'}, - {'name': 'architecture', 'title': 'Arch'}, - {'name': 'cpus', 'title': '#CPUs'}, - {'name': 'memory', 'title': 'RAM'}, - ], output['columns']) - machines_output = sorted([ - { - 'hostname': machine['hostname'], - 'power': machine['power_state'], - 'status': machine['status_name'], - 'owner': machine['owner'] if machine['owner'] else '(none)', - 'architecture': machine['architecture'], - 'cpus': machine['cpu_count'], - 'memory': machine['memory'], - } - for machine in machine_objs - ], key=itemgetter('hostname')) - self.assertEquals(machines_output, output['data']) + output = yaml.safe_load( + cmd.execute(origin, options, target=tabular.RenderTarget.yaml) + ) + self.assertEquals( + [ + {"name": "hostname", "title": "Hostname"}, + {"name": "power", "title": "Power"}, + {"name": "status", "title": "Status"}, + {"name": "owner", "title": "Owner"}, + {"name": "architecture", "title": "Arch"}, + {"name": "cpus", "title": "#CPUs"}, + {"name": "memory", "title": "RAM"}, + {"name": 
"pool", "title": "Resource pool"}, + {"name": "zone", "title": "Zone"}, + ], + output["columns"], + ) + machines_output = sorted( + [ + { + "hostname": machine["hostname"], + "power": machine["power_state"], + "status": machine["status_name"], + "owner": machine["owner"] if machine["owner"] else "(none)", + "architecture": machine["architecture"], + "cpus": machine["cpu_count"], + "memory": machine["memory"], + "pool": machine["pool"]["name"], + "zone": machine["zone"]["name"], + } + for machine in machine_objs + ], + key=itemgetter("hostname"), + ) + self.assertEquals(machines_output, output["data"]) def test_calls_handler_with_hostnames(self): origin = make_origin() @@ -90,11 +99,67 @@ def test_calls_handler_with_hostnames(self): origin.Machines._handler.read.return_value = [] subparser = machines.cmd_machines.register(parser) cmd = machines.cmd_machines(parser) - hostnames = [ - make_name_without_spaces() - for _ in range(3) - ] + hostnames = [make_name_without_spaces() for _ in range(3)] options = subparser.parse_args(hostnames) cmd.execute(origin, options, target=tabular.RenderTarget.yaml) - origin.Machines._handler.read.assert_called_once_with( - hostname=hostnames) + origin.Machines._handler.read.assert_called_once_with(hostname=hostnames) + + +class TestMachine(TestCaseWithProfile): + """Tests for `cmd_machine`.""" + + def setUp(self): + super().setUp() + origin = make_origin() + parser = ArgumentParser() + self.hostname = make_name_without_spaces() + machine_objs = [ + { + "hostname": self.hostname, + "architecture": "amd64/generic", + "status": NodeStatus.READY.value, + "status_name": NodeStatus.READY.name, + "owner": None, + "power_state": PowerState.OFF.value, + "cpu_count": 2, + "memory": 1024, + "pool": {"id": 1, "name": "pool1", "description": "pool1"}, + "zone": {"id": 1, "name": "zone1", "description": "zone1"}, + "tag_names": ["tag1", "tag2"], + "distro_series": "", + "power_type": "Manual", + }, + ] + origin.Machines._handler.read.return_value = 
machine_objs + cmd = machines.cmd_machine(parser) + subparser = machines.cmd_machine.register(parser) + options = subparser.parse_args([machine_objs[0]["hostname"]]) + self.cmd = partial(cmd.execute, origin, options) + + def test_yaml_machine_details_with_tags(self): + yaml_output = yaml.safe_load(self.cmd(target=tabular.RenderTarget.yaml)) + self.assertEqual(yaml_output.get("tags"), ["tag1", "tag2"]) + + def test_plain_machine_details_with_tags(self): + plain_output = self.cmd(target=tabular.RenderTarget.plain) + self.assertEqual( + plain_output, + f"""\ ++---------------+-------------+ +| Hostname | {self.hostname} | +| Status | READY | +| Image | (none) | +| Power | Off | +| Power Type | Manual | +| Arch | amd64 | +| #CPUs | 2 | +| RAM | 1.0 GB | +| Interfaces | 0 physical | +| IP addresses | | +| Resource pool | pool1 | +| Zone | zone1 | +| Owner | (none) | +| Tags | tag1 | +| | tag2 | ++---------------+-------------+""", + ) diff --git a/maas/client/flesh/tests/test_nodes.py b/maas/client/flesh/tests/test_nodes.py index a861090b..5dce2604 100644 --- a/maas/client/flesh/tests/test_nodes.py +++ b/maas/client/flesh/tests/test_nodes.py @@ -4,18 +4,11 @@ import yaml from .testing import TestCaseWithProfile -from .. import ( - ArgumentParser, - nodes, - tabular -) +from .. 
import ArgumentParser, nodes, tabular from ...enum import NodeType from ...testing import make_name_without_spaces from ...viscera.testing import bind -from ...viscera.nodes import ( - Node, - Nodes -) +from ...viscera.nodes import Node, Nodes def make_origin(): @@ -31,44 +24,48 @@ def test_returns_table_with_nodes(self): parser = ArgumentParser() node_obj = [ { - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.MACHINE.value, + "hostname": make_name_without_spaces(), + "node_type": NodeType.MACHINE.value, }, { - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.DEVICE.value, + "hostname": make_name_without_spaces(), + "node_type": NodeType.DEVICE.value, }, { - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.RACK_CONTROLLER.value, + "hostname": make_name_without_spaces(), + "node_type": NodeType.RACK_CONTROLLER.value, }, { - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.REGION_CONTROLLER.value, + "hostname": make_name_without_spaces(), + "node_type": NodeType.REGION_CONTROLLER.value, }, { - 'hostname': make_name_without_spaces(), - 'node_type': NodeType.REGION_AND_RACK_CONTROLLER.value, + "hostname": make_name_without_spaces(), + "node_type": NodeType.REGION_AND_RACK_CONTROLLER.value, }, ] origin.Nodes._handler.read.return_value = node_obj cmd = nodes.cmd_nodes(parser) subparser = nodes.cmd_nodes.register(parser) options = subparser.parse_args([]) - output = yaml.load( - cmd.execute(origin, options, target=tabular.RenderTarget.yaml)) - self.assertEquals([ - {'name': 'hostname', 'title': 'Hostname'}, - {'name': 'node_type', 'title': 'Type'}, - ], output['columns']) - nodes_output = sorted([ - { - 'hostname': node['hostname'], - 'node_type': node['node_type'], - } - for node in node_obj - ], key=itemgetter('hostname')) - self.assertEquals(nodes_output, output['data']) + output = yaml.safe_load( + cmd.execute(origin, options, target=tabular.RenderTarget.yaml) + ) + self.assertEquals( + [ + {"name": 
"hostname", "title": "Hostname"}, + {"name": "node_type", "title": "Type"}, + ], + output["columns"], + ) + nodes_output = sorted( + [ + {"hostname": node["hostname"], "node_type": node["node_type"]} + for node in node_obj + ], + key=itemgetter("hostname"), + ) + self.assertEquals(nodes_output, output["data"]) def test_calls_handler_with_hostnames(self): origin = make_origin() @@ -76,11 +73,7 @@ def test_calls_handler_with_hostnames(self): origin.Nodes._handler.read.return_value = [] subparser = nodes.cmd_nodes.register(parser) cmd = nodes.cmd_nodes(parser) - hostnames = [ - make_name_without_spaces() - for _ in range(3) - ] + hostnames = [make_name_without_spaces() for _ in range(3)] options = subparser.parse_args(hostnames) cmd.execute(origin, options, target=tabular.RenderTarget.yaml) - origin.Nodes._handler.read.assert_called_once_with( - hostname=hostnames) + origin.Nodes._handler.read.assert_called_once_with(hostname=hostnames) diff --git a/maas/client/flesh/tests/test_profiles.py b/maas/client/flesh/tests/test_profiles.py index b3ed0dde..d71e7270 100644 --- a/maas/client/flesh/tests/test_profiles.py +++ b/maas/client/flesh/tests/test_profiles.py @@ -1,22 +1,58 @@ """Tests for `maas.client.flesh.profiles`.""" +from argparse import ArgumentParser from io import StringIO import sys from textwrap import dedent +from unittest.mock import call from .. 
import profiles -from ...testing import TestCase +from ...bones.helpers import MacaroonLoginNotSupported +from ...testing import AsyncCallableMock, TestCase from ...utils.tests.test_profiles import make_profile class TestLogin(TestCase): """Tests for `cmd_login`.""" + def test_login_no_macaroons_prompts_user_pass(self): + profile = make_profile() + + stdout = self.patch(sys, "stdout", StringIO()) + mock_read_input = self.patch(profiles, "read_input") + mock_read_input.side_effect = ["username", "password"] + mock_login = AsyncCallableMock(side_effect=[MacaroonLoginNotSupported, profile]) + self.patch(profiles.helpers, "login", mock_login) + + parser = ArgumentParser() + cmd = profiles.cmd_login(parser) + options = parser.parse_args(["http://maas.example"]) + cmd(options) + mock_login.assert_has_calls( + [ + call( + "http://maas.example/api/2.0/", + anonymous=False, + insecure=False, + username=None, + password=None, + ), + call( + "http://maas.example/api/2.0/", + insecure=False, + username="username", + password="password", + ), + ] + ) + self.assertIn("Congratulations!", stdout.getvalue()) + def test_print_whats_next(self): profile = make_profile() stdout = self.patch(sys, "stdout", StringIO()) profiles.cmd_login.print_whats_next(profile) - expected = dedent("""\ + expected = dedent( + """\ Congratulations! You are logged in to the MAAS server at {profile.url} with the profile name {profile.name}. 
@@ -25,6 +61,7 @@ def test_print_whats_next(self): maas help - """).format(profile=profile) + """ + ).format(profile=profile) observed = stdout.getvalue() self.assertDocTestMatches(expected, observed) diff --git a/maas/client/flesh/tests/test_shell.py b/maas/client/flesh/tests/test_shell.py index 98ffb6fd..43787148 100644 --- a/maas/client/flesh/tests/test_shell.py +++ b/maas/client/flesh/tests/test_shell.py @@ -3,20 +3,10 @@ import random import sys -from testtools.matchers import ( - Contains, - Equals, - Is, -) - -from .. import ( - ArgumentParser, - shell, -) -from ...testing import ( - make_name_without_spaces, - TestCase, -) +from testtools.matchers import Contains, Equals, Is + +from .. import ArgumentParser, shell +from ...testing import make_name_without_spaces, TestCase from .testing import capture_parse_error @@ -44,13 +34,12 @@ def test_offers_profile_name_option_when_no_profiles_exist(self): def test_offers_profile_name_option_when_profiles_exist(self): profile_name_choices = tuple( - make_name_without_spaces("profile-name") for _ in range(5)) + make_name_without_spaces("profile-name") for _ in range(5) + ) profile_name_default = random.choice(profile_name_choices) - self.patch( - shell.cmd_shell, "profile_name_choices", profile_name_choices) - self.patch( - shell.cmd_shell, "profile_name_default", profile_name_default) + self.patch(shell.cmd_shell, "profile_name_choices", profile_name_choices) + self.patch(shell.cmd_shell, "profile_name_default", profile_name_default) parser = ArgumentParser() subparser = shell.cmd_shell.register(parser) @@ -68,8 +57,9 @@ def test_offers_profile_name_option_when_profiles_exist(self): self.patch(subparser, "error").side_effect = Exception profile_name = make_name_without_spaces("foo") error = capture_parse_error(subparser, "--profile-name", profile_name) - self.assertThat(str(error), Contains( - "argument --profile-name: invalid choice: ")) + self.assertThat( + str(error), Contains("argument --profile-name: invalid 
choice: ") + ) def electAttribute(self): # We're going to use an attribute in this module as the means for an @@ -103,8 +93,10 @@ def test_runs_stdin_when_not_interactive(self): # Mimic a non-interactive invocation of `maas shell`. self.patch(shell, "sys") shell.sys.stdin.isatty.return_value = False - shell.sys.stdin.read.return_value = ( - "import %s as mod; mod.%s = __file__" % (__name__, attrname)) + shell.sys.stdin.read.return_value = "import %s as mod; mod.%s = __file__" % ( + __name__, + attrname, + ) self.callShell() # That attribute has been updated. diff --git a/maas/client/flesh/tests/testing/__init__.py b/maas/client/flesh/tests/testing/__init__.py index 91c5b817..2a2dfbfd 100644 --- a/maas/client/flesh/tests/testing/__init__.py +++ b/maas/client/flesh/tests/testing/__init__.py @@ -1,8 +1,6 @@ """Test helpers for `maas.client.flesh`.""" -__all__ = [ - "capture_parse_error", -] +__all__ = ["capture_parse_error"] import argparse diff --git a/maas/client/flesh/users.py b/maas/client/flesh/users.py index 04a937cf..9d715908 100644 --- a/maas/client/flesh/users.py +++ b/maas/client/flesh/users.py @@ -1,13 +1,8 @@ """Commands for users.""" -__all__ = [ - "register", -] +__all__ = ["register"] -from . import ( - OriginPagedTableCommand, - tables, -) +from . import OriginPagedTableCommand, tables class cmd_users(OriginPagedTableCommand): diff --git a/maas/client/flesh/vlans.py b/maas/client/flesh/vlans.py index ce950e01..1229d38c 100644 --- a/maas/client/flesh/vlans.py +++ b/maas/client/flesh/vlans.py @@ -1,18 +1,12 @@ """Commands for vlans.""" -__all__ = [ - "register", -] +__all__ = ["register"] from http import HTTPStatus -from . import ( - CommandError, - OriginPagedTableCommand, - tables, -) +from . 
import CommandError, OriginPagedTableCommand, tables from ..bones import CallError -from ..utils.async import asynchronous +from ..utils.maas_async import asynchronous class cmd_vlans(OriginPagedTableCommand): @@ -20,10 +14,10 @@ class cmd_vlans(OriginPagedTableCommand): def __init__(self, parser): super(cmd_vlans, self).__init__(parser) - parser.add_argument("fabric", nargs=1, help=( - "Name of the fabric.")) - parser.add_argument("--minimal", action="store_true", help=( - "Output only the VIDs.")) + parser.add_argument("fabric", nargs=1, help=("Name of the fabric.")) + parser.add_argument( + "--minimal", action="store_true", help=("Output only the VIDs.") + ) @asynchronous async def load_object_sets(self, origin): @@ -34,21 +28,19 @@ async def load_object_sets(self, origin): def execute(self, origin, options, target): visible_columns = None if options.minimal: - visible_columns = ('vid',) + visible_columns = ("vid",) try: fabric = origin.Fabric.read(options.fabric[0]) except CallError as error: if error.status == HTTPStatus.NOT_FOUND: - raise CommandError( - "Unable to find fabric %s." % options.fabric[0]) + raise CommandError("Unable to find fabric %s." 
% options.fabric[0]) else: raise fabrics, subnets = self.load_object_sets(origin) table = tables.VlansTable( - visible_columns=visible_columns, - fabrics=fabrics, subnets=subnets) - return table.render( - target, fabric.vlans) + visible_columns=visible_columns, fabrics=fabrics, subnets=subnets + ) + return table.render(target, fabric.vlans) class cmd_vlan(OriginPagedTableCommand): @@ -56,10 +48,8 @@ class cmd_vlan(OriginPagedTableCommand): def __init__(self, parser): super(cmd_vlan, self).__init__(parser) - parser.add_argument("fabric", nargs=1, help=( - "Name of the fabric.")) - parser.add_argument("vid", nargs=1, help=( - "VID of the VLAN.")) + parser.add_argument("fabric", nargs=1, help=("Name of the fabric.")) + parser.add_argument("vid", nargs=1, help=("VID of the VLAN.")) @asynchronous async def load_object_sets(self, origin): @@ -80,12 +70,11 @@ def execute(self, origin, options, target): fabric = origin.Fabric.read(options.fabric[0]) except CallError as error: if error.status == HTTPStatus.NOT_FOUND: - raise CommandError( - "Unable to find fabric %s." % options.fabric[0]) + raise CommandError("Unable to find fabric %s." % options.fabric[0]) else: raise vlan_id = options.vid[0] - if vlan_id != 'untagged': + if vlan_id != "untagged": try: vlan_id = int(vlan_id) except ValueError: @@ -96,8 +85,9 @@ def execute(self, origin, options, target): vlan = fabric.vlans.get_default() if vlan is None: raise CommandError( - "Unable to find VLAN %s on fabric %s." % ( - options.vid[0], options.fabric[0])) + "Unable to find VLAN %s on fabric %s." 
+ % (options.vid[0], options.fabric[0]) + ) fabrics, subnets = self.load_object_sets(origin) table = tables.VlanDetail(fabrics=fabrics, subnets=subnets) return table.render(target, vlan) diff --git a/maas/client/testing/__init__.py b/maas/client/testing/__init__.py index 454233b5..4e740acd 100644 --- a/maas/client/testing/__init__.py +++ b/maas/client/testing/__init__.py @@ -19,10 +19,7 @@ import asyncio import doctest from functools import partial -from itertools import ( - islice, - repeat, -) +from itertools import islice, repeat from pathlib import Path import random import string @@ -33,14 +30,14 @@ from testtools import testcase from testtools.matchers import DocTestMatches -from ..utils.async import Asynchronous +from ..utils.maas_async import Asynchronous -random_letters = map( - random.choice, repeat(string.ascii_letters + string.digits)) +random_letters = map(random.choice, repeat(string.ascii_letters + string.digits)) random_letters_with_spaces = map( - random.choice, repeat(string.ascii_letters + string.digits + ' ')) + random.choice, repeat(string.ascii_letters + string.digits + " ") +) random_octet = partial(random.randint, 0, 255) @@ -57,7 +54,7 @@ def make_string_without_spaces(size=10): return "".join(islice(random_letters, size)) -def make_name(prefix="name", sep='-', size=6): +def make_name(prefix="name", sep="-", size=6): """Make a random name. :param prefix: Optional prefix. Defaults to "name". @@ -69,7 +66,7 @@ def make_name(prefix="name", sep='-', size=6): return prefix + sep + make_string(size) -def make_name_without_spaces(prefix="name", sep='-', size=6): +def make_name_without_spaces(prefix="name", sep="-", size=6): """Make a random name WITHOUT spaces. :param prefix: Optional prefix. Defaults to "name". 
diff --git a/maas/client/tests/test.py b/maas/client/tests/test.py index 296b589c..5b467cd2 100644 --- a/maas/client/tests/test.py +++ b/maas/client/tests/test.py @@ -3,19 +3,11 @@ from inspect import signature from unittest.mock import sentinel -from testtools.matchers import ( - Equals, - Is, - IsInstance, - Not, -) +from testtools.matchers import Equals, Is, IsInstance, Not from .. import facade from ... import client -from ..testing import ( - AsyncCallableMock, - TestCase, -) +from ..testing import AsyncCallableMock, TestCase from ..viscera import Origin @@ -30,9 +22,11 @@ def test__connect_calls_through_to_Origin(self): connect = self.patch(Origin, "connect", AsyncCallableMock()) connect.return_value = sentinel.profile, sentinel.origin client_object = client.connect( - sentinel.url, apikey=sentinel.apikey, insecure=sentinel.insecure) + sentinel.url, apikey=sentinel.apikey, insecure=sentinel.insecure + ) connect.assert_called_once_with( - sentinel.url, apikey=sentinel.apikey, insecure=sentinel.insecure) + sentinel.url, apikey=sentinel.apikey, insecure=sentinel.insecure + ) self.assertThat(client_object, IsInstance(facade.Client)) self.assertThat(client_object._origin, Is(sentinel.origin)) @@ -44,11 +38,17 @@ def test__login_calls_through_to_Origin(self): login = self.patch(Origin, "login", AsyncCallableMock()) login.return_value = sentinel.profile, sentinel.origin client_object = client.login( - sentinel.url, username=sentinel.username, - password=sentinel.password, insecure=sentinel.insecure) + sentinel.url, + username=sentinel.username, + password=sentinel.password, + insecure=sentinel.insecure, + ) login.assert_called_once_with( - sentinel.url, username=sentinel.username, - password=sentinel.password, insecure=sentinel.insecure) + sentinel.url, + username=sentinel.username, + password=sentinel.password, + insecure=sentinel.insecure, + ) self.assertThat(client_object, IsInstance(facade.Client)) self.assertThat(client_object._origin, Is(sentinel.origin)) diff 
--git a/maas/client/tests/test_facade.py b/maas/client/tests/test_facade.py index aefad83a..6e98de95 100644 --- a/maas/client/tests/test_facade.py +++ b/maas/client/tests/test_facade.py @@ -2,16 +2,9 @@ from unittest.mock import Mock -from testtools.matchers import ( - IsInstance, - MatchesAll, - MatchesStructure, -) - -from .. import ( - facade, - viscera, -) +from testtools.matchers import IsInstance, MatchesAll, MatchesStructure + +from .. import facade, viscera from ..testing import TestCase @@ -31,182 +24,252 @@ def setUp(self): self.client = facade.Client(self.origin) def test__client_maps_account(self): - self.assertThat(self.client, MatchesClient( - account=MatchesFacade( - create_credentials=self.origin.Account.create_credentials, - delete_credentials=self.origin.Account.delete_credentials, + self.assertThat( + self.client, + MatchesClient( + account=MatchesFacade( + create_credentials=self.origin.Account.create_credentials, + delete_credentials=self.origin.Account.delete_credentials, + ) ), - )) + ) def test__client_maps_boot_resources(self): - self.assertThat(self.client, MatchesClient( - boot_resources=MatchesFacade( - create=self.origin.BootResources.create, - get=self.origin.BootResource.read, - list=self.origin.BootResources.read, - start_import=self.origin.BootResources.start_import, - stop_import=self.origin.BootResources.stop_import, + self.assertThat( + self.client, + MatchesClient( + boot_resources=MatchesFacade( + create=self.origin.BootResources.create, + get=self.origin.BootResource.read, + list=self.origin.BootResources.read, + start_import=self.origin.BootResources.start_import, + stop_import=self.origin.BootResources.stop_import, + ) ), - )) + ) def test__client_maps_boot_sources(self): - self.assertThat(self.client, MatchesClient( - boot_sources=MatchesFacade( - create=self.origin.BootSources.create, - get=self.origin.BootSource.read, - list=self.origin.BootSources.read, + self.assertThat( + self.client, + MatchesClient( + 
boot_sources=MatchesFacade( + create=self.origin.BootSources.create, + get=self.origin.BootSource.read, + list=self.origin.BootSources.read, + ) ), - )) + ) def test__client_maps_devices(self): - self.assertThat(self.client, MatchesClient( - devices=MatchesFacade( - get=self.origin.Device.read, - list=self.origin.Devices.read, + self.assertThat( + self.client, + MatchesClient( + devices=MatchesFacade( + create=self.origin.Devices.create, + get=self.origin.Device.read, + list=self.origin.Devices.read, + ) ), - )) + ) def test__client_maps_events(self): - self.assertThat(self.client, MatchesClient( - events=MatchesFacade( - query=self.origin.Events.query, - DEBUG=self.origin.Events.Level.DEBUG, - INFO=self.origin.Events.Level.INFO, - WARNING=self.origin.Events.Level.WARNING, - ERROR=self.origin.Events.Level.ERROR, - CRITICAL=self.origin.Events.Level.CRITICAL, + self.assertThat( + self.client, + MatchesClient( + events=MatchesFacade( + query=self.origin.Events.query, + DEBUG=self.origin.Events.Level.DEBUG, + INFO=self.origin.Events.Level.INFO, + WARNING=self.origin.Events.Level.WARNING, + ERROR=self.origin.Events.Level.ERROR, + CRITICAL=self.origin.Events.Level.CRITICAL, + ) ), - )) + ) def test__client_maps_fabrics(self): - self.assertThat(self.client, MatchesClient( - fabrics=MatchesFacade( - create=self.origin.Fabrics.create, - get=self.origin.Fabric.read, - get_default=self.origin.Fabric.get_default, - list=self.origin.Fabrics.read, - ) - )) + self.assertThat( + self.client, + MatchesClient( + fabrics=MatchesFacade( + create=self.origin.Fabrics.create, + get=self.origin.Fabric.read, + get_default=self.origin.Fabric.get_default, + list=self.origin.Fabrics.read, + ) + ), + ) def test__client_maps_subnets(self): - self.assertThat(self.client, MatchesClient( - subnets=MatchesFacade( - create=self.origin.Subnets.create, - get=self.origin.Subnet.read, - list=self.origin.Subnets.read, - ) - )) + self.assertThat( + self.client, + MatchesClient( + subnets=MatchesFacade( + 
create=self.origin.Subnets.create, + get=self.origin.Subnet.read, + list=self.origin.Subnets.read, + ) + ), + ) def test__client_maps_spaces(self): - self.assertThat(self.client, MatchesClient( - spaces=MatchesFacade( - create=self.origin.Spaces.create, - get=self.origin.Space.read, - get_default=self.origin.Space.get_default, - list=self.origin.Spaces.read, - ) - )) + self.assertThat( + self.client, + MatchesClient( + spaces=MatchesFacade( + create=self.origin.Spaces.create, + get=self.origin.Space.read, + get_default=self.origin.Space.get_default, + list=self.origin.Spaces.read, + ) + ), + ) def test__client_maps_ip_ranges(self): - self.assertThat(self.client, MatchesClient( - ip_ranges=MatchesFacade( - create=self.origin.IPRanges.create, - get=self.origin.IPRange.read, - list=self.origin.IPRanges.read, - ) - )) + self.assertThat( + self.client, + MatchesClient( + ip_ranges=MatchesFacade( + create=self.origin.IPRanges.create, + get=self.origin.IPRange.read, + list=self.origin.IPRanges.read, + ) + ), + ) def test__client_maps_static_routes(self): - self.assertThat(self.client, MatchesClient( - static_routes=MatchesFacade( - create=self.origin.StaticRoutes.create, - get=self.origin.StaticRoute.read, - list=self.origin.StaticRoutes.read, - ) - )) + self.assertThat( + self.client, + MatchesClient( + static_routes=MatchesFacade( + create=self.origin.StaticRoutes.create, + get=self.origin.StaticRoute.read, + list=self.origin.StaticRoutes.read, + ) + ), + ) def test__client_maps_files(self): - self.assertThat(self.client, MatchesClient( - files=MatchesFacade( - list=self.origin.Files.read, - ), - )) + self.assertThat( + self.client, MatchesClient(files=MatchesFacade(list=self.origin.Files.read)) + ) def test__client_maps_machines(self): - self.assertThat(self.client, MatchesClient( - machines=MatchesFacade( - allocate=self.origin.Machines.allocate, - create=self.origin.Machines.create, - get=self.origin.Machine.read, - list=self.origin.Machines.read, + self.assertThat( 
+ self.client, + MatchesClient( + machines=MatchesFacade( + allocate=self.origin.Machines.allocate, + create=self.origin.Machines.create, + get=self.origin.Machine.read, + list=self.origin.Machines.read, + ) ), - )) + ) def test__client_maps_rack_controllers(self): - self.assertThat(self.client, MatchesClient( - rack_controllers=MatchesFacade( - get=self.origin.RackController.read, - list=self.origin.RackControllers.read, + self.assertThat( + self.client, + MatchesClient( + rack_controllers=MatchesFacade( + get=self.origin.RackController.read, + list=self.origin.RackControllers.read, + ) ), - )) + ) def test__client_maps_region_controllers(self): - self.assertThat(self.client, MatchesClient( - region_controllers=MatchesFacade( - get=self.origin.RegionController.read, - list=self.origin.RegionControllers.read, + self.assertThat( + self.client, + MatchesClient( + region_controllers=MatchesFacade( + get=self.origin.RegionController.read, + list=self.origin.RegionControllers.read, + ) ), - )) + ) def test__client_maps_ssh_keys(self): - self.assertThat(self.client, MatchesClient( - ssh_keys=MatchesFacade( - create=self.origin.SSHKeys.create, - get=self.origin.SSHKey.read, - list=self.origin.SSHKeys.read, - ) - )) + self.assertThat( + self.client, + MatchesClient( + ssh_keys=MatchesFacade( + create=self.origin.SSHKeys.create, + get=self.origin.SSHKey.read, + list=self.origin.SSHKeys.read, + ) + ), + ) def test__client_maps_tags(self): - self.assertThat(self.client, MatchesClient( - tags=MatchesFacade( - create=self.origin.Tags.create, - list=self.origin.Tags.read, + self.assertThat( + self.client, + MatchesClient( + tags=MatchesFacade( + create=self.origin.Tags.create, list=self.origin.Tags.read + ) ), - )) + ) def test__client_maps_users(self): - self.assertThat(self.client, MatchesClient( - users=MatchesFacade( - create=self.origin.Users.create, - list=self.origin.Users.read, - whoami=self.origin.Users.whoami, + self.assertThat( + self.client, + MatchesClient( + 
users=MatchesFacade( + create=self.origin.Users.create, + list=self.origin.Users.read, + whoami=self.origin.Users.whoami, + ) ), - )) + ) def test__client_maps_version(self): - self.assertThat(self.client, MatchesClient( - version=MatchesFacade( - get=self.origin.Version.read, - ), - )) + self.assertThat( + self.client, + MatchesClient(version=MatchesFacade(get=self.origin.Version.read)), + ) def test__client_maps_zones(self): - self.assertThat(self.client, MatchesClient( - zones=MatchesFacade( - create=self.origin.Zones.create, - get=self.origin.Zone.read, - list=self.origin.Zones.read, + self.assertThat( + self.client, + MatchesClient( + zones=MatchesFacade( + create=self.origin.Zones.create, + get=self.origin.Zone.read, + list=self.origin.Zones.read, + ) ), - )) + ) + + def test__client_maps_pods(self): + self.assertThat( + self.client, + MatchesClient( + pods=MatchesFacade( + create=self.origin.Pods.create, + get=self.origin.Pod.read, + list=self.origin.Pods.read, + ) + ), + ) + + def test__client_maps_resource_pools(self): + self.assertThat( + self.client, + MatchesClient( + resource_pools=MatchesFacade( + create=self.origin.ResourcePools.create, + get=self.origin.ResourcePool.read, + list=self.origin.ResourcePools.read, + ) + ), + ) def MatchesClient(**facades): """Matches a `facade.Client` with the given facades.""" return MatchesAll( - IsInstance(facade.Client), - MatchesStructure(**facades), - first_only=True, + IsInstance(facade.Client), MatchesStructure(**facades), first_only=True ) diff --git a/maas/client/utils/__init__.py b/maas/client/utils/__init__.py index a3cd88a4..1ae5a928 100644 --- a/maas/client/utils/__init__.py +++ b/maas/client/utils/__init__.py @@ -26,38 +26,21 @@ "vars_class", ] -from collections import ( - Iterable, - namedtuple, -) -from functools import ( - lru_cache, - partial, -) -from inspect import ( - cleandoc, - getdoc, -) -from itertools import ( - chain, - cycle, - repeat, -) + +from collections import namedtuple +from 
collections.abc import Iterable +from functools import lru_cache, partial +from inspect import cleandoc, getdoc +from itertools import chain, cycle, repeat import re import sys import threading from time import time -from urllib.parse import ( - quote_plus, - urlparse, -) +from urllib.parse import quote_plus, urlparse from oauthlib import oauth1 -from .multipart import ( - build_multipart_message, - encode_multipart_message, -) +from .multipart import build_multipart_message, encode_multipart_message def urlencode(data): @@ -69,14 +52,13 @@ def urlencode(data): Unicode strings will be encoded to UTF-8. This is what Django expects; see `smart_text` in the Django documentation. """ + def dec(string): if isinstance(string, bytes): string = string.decode("utf-8") return quote_plus(string) - return "&".join( - "%s=%s" % (dec(name), dec(value)) - for name, value in data) + return "&".join("%s=%s" % (dec(name), dec(value)) for name, value in data) def prepare_payload(op, method, uri, data): @@ -103,8 +85,8 @@ def slurp(opener): if method == "GET": headers, body = [], None query.extend( - (name, slurp(value) if callable(value) else value) - for name, value in data) + (name, slurp(value) if callable(value) else value) for name, value in data + ) else: # Even if data is empty, construct a multipart request body. Piston # (server-side) sets `request.data` to `None` if there's no payload. @@ -119,8 +101,8 @@ class OAuthSigner: """Helper class to OAuth-sign an HTTP request.""" def __init__( - self, token_key, token_secret, consumer_key, consumer_secret, - realm="OAuth"): + self, token_key, token_secret, consumer_key, consumer_secret, realm="OAuth" + ): """Initialize a ``OAuthAuthorizer``. :type token_key: Unicode string. @@ -130,6 +112,7 @@ def __init__( :param realm: Optional. 
""" + def _to_unicode(string): if isinstance(string, bytes): return string.decode("ascii") @@ -152,9 +135,13 @@ def sign_request(self, url, method, body, headers): # The use of PLAINTEXT here was copied from MAAS, but we should switch # to HMAC once it works server-side. client = oauth1.Client( - self.consumer_key, self.consumer_secret, self.token_key, - self.token_secret, signature_method=oauth1.SIGNATURE_PLAINTEXT, - realm=self.realm) + self.consumer_key, + self.consumer_secret, + self.token_key, + self.token_secret, + signature_method=oauth1.SIGNATURE_PLAINTEXT, + realm=self.realm, + ) # To preserve API backward compatibility convert an empty string body # to `None`. The old "oauth" library would treat the empty string as # "no body", but "oauthlib" requires `None`. @@ -175,13 +162,13 @@ def sign(uri, headers, credentials): auth.sign_request(uri, method="GET", body=None, headers=headers) -re_paragraph_splitter = re.compile( - r"(?:\r\n){2,}|\r{2,}|\n{2,}", re.MULTILINE) +re_paragraph_splitter = re.compile(r"(?:\r\n){2,}|\r{2,}|\n{2,}", re.MULTILINE) paragraph_split = re_paragraph_splitter.split docstring_split = partial(paragraph_split, maxsplit=1) remove_line_breaks = lambda string: ( - " ".join(line.strip() for line in string.splitlines())) + " ".join(line.strip() for line in string.splitlines()) +) newline = "\n" empty = "" @@ -248,8 +235,7 @@ def vars_class(cls): This differs from the usual behaviour of `vars` which returns attributes belonging to the given class and not its ancestors. 
""" - return dict(chain.from_iterable( - vars(cls).items() for cls in reversed(cls.__mro__))) + return dict(chain.from_iterable(vars(cls).items() for cls in reversed(cls.__mro__))) def retries(timeout=30, intervals=1, time=time): @@ -324,11 +310,7 @@ def coalesce(*values, default=None): def remove_None(params: dict): """Remove all keys in `params` that have the value of `None`.""" - return { - key: value - for key, value in params.items() - if value is not None - } + return {key: value for key, value in params.items() if value is not None} class SpinnerContext: @@ -346,7 +328,7 @@ def print(self, *args, **kwargs): the line printed doesn't overwrite an already existing spinner line. """ clear_len = max(len(self._prev_msg), len(self.msg)) + 4 - self.spinner.stream.write("%s\r" % (' ' * clear_len)) + self.spinner.stream.write("%s\r" % (" " * clear_len)) print(*args, file=self.spinner.stream, flush=True, **kwargs) @@ -356,7 +338,7 @@ class Spinner: Use as a context manager. """ - def __init__(self, frames='/-\|', stream=sys.stdout): + def __init__(self, frames=r"/-\|", stream=sys.stdout): super(Spinner, self).__init__() self.frames = frames self.stream = stream @@ -376,21 +358,21 @@ def run(): # Write out successive frames (and a backspace) every 0.1 # seconds until done is set. while not done.wait(0.1): - diff = ( - len(self.__context._prev_msg) - - len(self.__context.msg)) + diff = len(self.__context._prev_msg) - len(self.__context.msg) if diff < 0: diff = 0 - stream.write("[%s] %s%s\r" % ( - next(frames), self.__context.msg, ' ' * diff)) + stream.write( + "[%s] %s%s\r" + % (next(frames), self.__context.msg, " " * diff) + ) self.__context._prev_msg = self.__context.msg stream.flush() finally: # Clear line and enable cursor. 
- clear_len = max( - len(self.__context._prev_msg), - len(self.__context.msg)) + 4 - stream.write("%s\r" % (' ' * clear_len)) + clear_len = ( + max(len(self.__context._prev_msg), len(self.__context.msg)) + 4 + ) + stream.write("%s\r" % (" " * clear_len)) stream.write("\033[?25h") stream.flush() diff --git a/maas/client/utils/auth.py b/maas/client/utils/auth.py index 7c2d176d..deba7223 100644 --- a/maas/client/utils/auth.py +++ b/maas/client/utils/auth.py @@ -14,10 +14,7 @@ """MAAS CLI authentication.""" -__all__ = [ - "obtain_credentials", - "try_getpass", -] +__all__ = ["obtain_credentials", "try_getpass"] from getpass import getpass import sys @@ -42,8 +39,7 @@ def obtain_credentials(credentials): if credentials == "-": credentials = sys.stdin.readline().strip() elif credentials is None: - credentials = try_getpass( - "API key (leave empty for anonymous access): ") + credentials = try_getpass("API key (leave empty for anonymous access): ") # Ensure that the credentials have a valid form. if credentials and not credentials.isspace(): return Credentials.parse(credentials) diff --git a/maas/client/utils/creds.py b/maas/client/utils/creds.py index efea361a..38dc8383 100644 --- a/maas/client/utils/creds.py +++ b/maas/client/utils/creds.py @@ -23,16 +23,15 @@ processes. """ -__all__ = [ - "Credentials", - ] +__all__ = ["Credentials"] from collections import namedtuple import typing CredentialsBase = namedtuple( - "CredentialsBase", ("consumer_key", "token_key", "token_secret")) + "CredentialsBase", ("consumer_key", "token_key", "token_secret") +) class Credentials(CredentialsBase): @@ -71,7 +70,8 @@ def parse(cls, credentials) -> typing.Optional["Credentials"]: else: raise ValueError( "Malformed credentials. Expected 3 colon-separated " - "parts, got %r." % (credentials, )) + "parts, got %r." 
% (credentials,) + ) else: parts = list(credentials) if len(parts) == 0: @@ -81,7 +81,8 @@ def parse(cls, credentials) -> typing.Optional["Credentials"]: else: raise ValueError( "Malformed credentials. Expected 3 parts, " - "got %r." % (credentials, )) + "got %r." % (credentials,) + ) def __str__(self): return ":".join(self) diff --git a/maas/client/utils/diff.py b/maas/client/utils/diff.py index ffc0a563..6dde19f0 100644 --- a/maas/client/utils/diff.py +++ b/maas/client/utils/diff.py @@ -14,9 +14,7 @@ """Helpers for calulating difference between objects.""" -__all__ = [ - "calculate_dict_diff", -] +__all__ = ["calculate_dict_diff"] from . import remove_None @@ -37,7 +35,7 @@ def calculate_dict_diff(old_params: dict, new_params: dict): if value != new_params[key]: params_diff[key] = new_params[key] else: - params_diff[key] = '' + params_diff[key] = "" for key, value in new_params.items(): if key not in old_params: params_diff[key] = value diff --git a/maas/client/utils/async.py b/maas/client/utils/maas_async.py similarity index 94% rename from maas/client/utils/async.py rename to maas/client/utils/maas_async.py index 117c7a9c..19b8bc31 100644 --- a/maas/client/utils/async.py +++ b/maas/client/utils/maas_async.py @@ -14,18 +14,11 @@ """Asynchronous helpers, for use with `asyncio`.""" -__all__ = [ - "asynchronous", - "Asynchronous", - "is_loop_running", -] +__all__ = ["asynchronous", "Asynchronous", "is_loop_running"] from asyncio import get_event_loop from functools import wraps -from inspect import ( - isawaitable, - iscoroutinefunction, -) +from inspect import isawaitable, iscoroutinefunction def asynchronous(func): @@ -40,6 +33,7 @@ def asynchronous(func): function from outside of the event-loop, and so makes interactive use of these APIs far more intuitive. 
""" + @wraps(func) def wrapper(*args, **kwargs): eventloop = get_event_loop() diff --git a/maas/client/utils/multipart.py b/maas/client/utils/multipart.py index 41ba8f3e..53f5ea54 100644 --- a/maas/client/utils/multipart.py +++ b/maas/client/utils/multipart.py @@ -14,21 +14,13 @@ """Encoding of MIME multipart data.""" -__all__ = [ - 'encode_multipart_data', - ] - -from collections import ( - Iterable, - Mapping, -) +__all__ = ["encode_multipart_data"] + +from collections.abc import Iterable, Mapping from email.generator import BytesGenerator from email.mime.application import MIMEApplication from email.mime.multipart import MIMEMultipart -from io import ( - BytesIO, - IOBase, -) +from io import BytesIO, IOBase from itertools import chain import mimetypes @@ -62,8 +54,7 @@ def make_string_payload(name, content): def make_file_payload(name, content): payload = MIMEApplication(content.read()) - payload.add_header( - "Content-Disposition", "form-data", name=name, filename=name) + payload.add_header("Content-Disposition", "form-data", name=name, filename=name) names = name, getattr(content, "name", None) payload.set_type(get_content_type(*names)) return payload @@ -114,8 +105,7 @@ def make_payloads(name, content): for payload in make_payloads(name, part): yield payload else: - raise AssertionError( - "%r is unrecognised: %r" % (name, content)) + raise AssertionError("%r is unrecognised: %r" % (name, content)) def build_multipart_message(data): diff --git a/maas/client/utils/profiles.py b/maas/client/utils/profiles.py index a9d09a4c..8da4aff2 100644 --- a/maas/client/utils/profiles.py +++ b/maas/client/utils/profiles.py @@ -14,11 +14,7 @@ """Profile configuration.""" -__all__ = [ - "Profile", - "ProfileStore", - "ProfileNotFound", -] +__all__ = ["Profile", "ProfileStore", "ProfileNotFound"] from contextlib import contextmanager from copy import deepcopy @@ -39,12 +35,18 @@ class Profile(tuple): __slots__ = () def __new__( - cls, name: str, url: str, *, - credentials: 
typing.Union[Credentials, typing.Sequence, str, None], - description: dict, **other: JSONObject): - return super(Profile, cls).__new__(cls, ( - name, api_url(url), Credentials.parse(credentials), - description, other)) + cls, + name: str, + url: str, + *, + credentials: typing.Union[Credentials, typing.Sequence, str, None], + description: dict, + **other: JSONObject + ): + return super(Profile, cls).__new__( + cls, + (name, api_url(url), Credentials.parse(credentials), description, other), + ) @property def name(self) -> str: @@ -101,25 +103,29 @@ def dump(self): Use this value when persisting a profile. """ return dict( - self.other, name=self.name, url=self.url, - credentials=self.credentials, description=self.description, + self.other, + name=self.name, + url=self.url, + credentials=self.credentials, + description=self.description, ) def __repr__(self): if self.credentials is None: return "<%s %s (anonymous) %s>" % ( - self.__class__.__name__, self.name, self.url) + self.__class__.__name__, + self.name, + self.url, + ) else: - return "<%s %s %s>" % ( - self.__class__.__name__, self.name, self.url) + return "<%s %s %s>" % (self.__class__.__name__, self.name, self.url) class ProfileNotFound(Exception): """The named profile was not found.""" def __init__(self, name): - super(ProfileNotFound, self).__init__( - "Profile '%s' not found." % (name,)) + super(ProfileNotFound, self).__init__("Profile '%s' not found." % (name,)) def schema_create(conn): @@ -130,13 +136,17 @@ def schema_create(conn): :param conn: A connection to an SQLite3 database. 
""" - conn.execute(dedent("""\ + conn.execute( + dedent( + """\ CREATE TABLE IF NOT EXISTS profiles (id INTEGER PRIMARY KEY, name TEXT NOT NULL UNIQUE, data BLOB NOT NULL, selected BOOLEAN NOT NULL DEFAULT FALSE) - """)) + """ + ) + ) # Partial indexes are only available in >=3.8.0 and expressions in indexes # are only available in >=3.9.0 (https://www.sqlite.org/partialindex.html # & https://www.sqlite.org/expridx.html). Don't bother with any kind of @@ -144,11 +154,15 @@ def schema_create(conn): if sqlite3.sqlite_version_info >= (3, 9, 0): # This index is for data integrity -- ensuring that only one profile # is the default ("selected") profile -- and speed a distant second. - conn.execute(dedent("""\ + conn.execute( + dedent( + """\ CREATE UNIQUE INDEX IF NOT EXISTS only_one_profile_selected ON profiles (selected IS NOT NULL) WHERE selected - """)) + """ + ) + ) def schema_import(conn, dbpath): @@ -162,14 +176,14 @@ def schema_import(conn, dbpath): profiles. :param dbpath: The filesystem path to the source SQLite3 database. """ - conn.execute( - "ATTACH DATABASE ? AS source", (str(dbpath),)) + conn.execute("ATTACH DATABASE ? 
AS source", (str(dbpath),)) conn.execute( "INSERT OR IGNORE INTO profiles (name, data)" " SELECT name, data FROM source.profiles" - " WHERE data IS NOT NULL") - conn.execute( - "DETACH DATABASE source") + " WHERE data IS NOT NULL" + ) + conn.commit() # need to commit before detaching the other db + conn.execute("DETACH DATABASE source") class ProfileStore: @@ -185,8 +199,8 @@ def __iter__(self): def load(self, name: str) -> Profile: found = self.database.execute( - "SELECT data FROM profiles" - " WHERE name = ?", (name,)).fetchone() + "SELECT data FROM profiles" " WHERE name = ?", (name,) + ).fetchone() if found is None: raise ProfileNotFound(name) else: @@ -205,22 +219,22 @@ def save(self, profile: Profile): # Ensure there's a row for this profile. self.database.execute( "INSERT OR IGNORE INTO profiles (name, data) VALUES (?, '')", - (profile.name,)) + (profile.name,), + ) # Update the row's data. self.database.execute( - "UPDATE profiles SET data = ? WHERE name = ?", - (data, profile.name)) + "UPDATE profiles SET data = ? 
WHERE name = ?", (data, profile.name) + ) def delete(self, name: str): - self.database.execute( - "DELETE FROM profiles WHERE name = ?", (name,)) + self.database.execute("DELETE FROM profiles WHERE name = ?", (name,)) @property def default(self) -> typing.Optional[Profile]: """The name of the default profile to use, or `None`.""" found = self.database.execute( - "SELECT name, data FROM profiles WHERE selected" - " ORDER BY name LIMIT 1").fetchone() + "SELECT name, data FROM profiles WHERE selected" " ORDER BY name LIMIT 1" + ).fetchone() if found is None: return None else: @@ -234,8 +248,8 @@ def default(self, profile: Profile): self.save(profile) del self.default self.database.execute( - "UPDATE profiles SET selected = (name = ?)", - (profile.name,)) + "UPDATE profiles SET selected = (name = ?)", (profile.name,) + ) @default.deleter def default(self): @@ -243,7 +257,11 @@ def default(self): @classmethod @contextmanager - def open(cls, dbpath=Path("~/.maas.db").expanduser()): + def open( + cls, + dbpath=Path("~/.maas.db").expanduser(), + migrate_from=Path("~/.maascli.db").expanduser(), + ): """Load a profiles database. Called without arguments this will open (and create) a database in the @@ -253,11 +271,12 @@ def open(cls, dbpath=Path("~/.maas.db").expanduser()): database on exit, saving if the exit is clean. :param dbpath: The path to the database file to create and open. + :param migrate_from: Path to a database file to migrate from. """ # Ensure we're working with a Path instance. dbpath = Path(dbpath) + migrate_from = Path(migrate_from) # See if we ought to do a one-time migration. - migrate_from = Path("~/.maascli.db").expanduser() migrate = migrate_from.is_file() and not dbpath.exists() # Initialise filename with restrictive permissions... 
dbpath.touch(mode=0o600, exist_ok=True) @@ -272,7 +291,7 @@ def open(cls, dbpath=Path("~/.maas.db").expanduser()): yield store else: yield store - except: + except: # noqa: E722 raise else: database.commit() diff --git a/maas/client/utils/testing/__init__.py b/maas/client/utils/testing/__init__.py index 6f8f714f..f13beedb 100644 --- a/maas/client/utils/testing/__init__.py +++ b/maas/client/utils/testing/__init__.py @@ -6,7 +6,7 @@ def make_Credentials(): return Credentials( - make_name_without_spaces('consumer_key'), - make_name_without_spaces('token_key'), - make_name_without_spaces('secret_key'), + make_name_without_spaces("consumer_key"), + make_name_without_spaces("token_key"), + make_name_without_spaces("secret_key"), ) diff --git a/maas/client/utils/tests/test.py b/maas/client/utils/tests/test.py index f5187461..acd8e271 100644 --- a/maas/client/utils/tests/test.py +++ b/maas/client/utils/tests/test.py @@ -31,15 +31,10 @@ from twisted.internet.task import Clock from ... import utils -from ...testing import ( - make_name_without_spaces, - make_string, - TestCase, -) +from ...testing import make_name_without_spaces, make_string, TestCase class TestMAASOAuth(TestCase): - def test_OAuthSigner_sign_request_adds_header(self): token_key = make_name_without_spaces("token-key") token_secret = make_name_without_spaces("token-secret") @@ -49,18 +44,23 @@ def test_OAuthSigner_sign_request_adds_header(self): headers = {} auth = utils.OAuthSigner( - token_key=token_key, token_secret=token_secret, - consumer_key=consumer_key, consumer_secret=consumer_secret, - realm=realm) - auth.sign_request('http://example.com/', "GET", None, headers) + token_key=token_key, + token_secret=token_secret, + consumer_key=consumer_key, + consumer_secret=consumer_secret, + realm=realm, + ) + auth.sign_request("http://example.com/", "GET", None, headers) - self.assertIn('Authorization', 
headers) + self.assertIn("Authorization", headers) authorization = headers["Authorization"] self.assertIn('realm="%s"' % realm, authorization) self.assertIn('oauth_token="%s"' % token_key, authorization) self.assertIn('oauth_consumer_key="%s"' % consumer_key, authorization) - self.assertIn('oauth_signature="%s%%26%s"' % ( - consumer_secret, token_secret), authorization) + self.assertIn( + 'oauth_signature="%s%%26%s"' % (consumer_secret, token_secret), + authorization, + ) def test_sign_adds_header(self): token_key = make_name_without_spaces("token-key") @@ -68,10 +68,11 @@ def test_sign_adds_header(self): consumer_key = make_name_without_spaces("consumer-key") headers = {} - utils.sign('http://example.com/', headers, ( - consumer_key, token_key, token_secret)) + utils.sign( + "http://example.com/", headers, (consumer_key, token_key, token_secret) + ) - self.assertIn('Authorization', headers) + self.assertIn("Authorization", headers) authorization = headers["Authorization"] self.assertIn('realm="OAuth"', authorization) self.assertIn('oauth_token="%s"' % token_key, authorization) @@ -88,112 +89,194 @@ class TestPayloadPreparation(TestCase): scenarios_without_op = ( # Without data, all requests have an empty request body and no extra # headers. 
- ("create", - {"method": "POST", "data": [], - "expected_uri": uri_base, - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("read", - {"method": "GET", "data": [], - "expected_uri": uri_base, - "expected_body": None, - "expected_headers": []}), - ("update", - {"method": "PUT", "data": [], - "expected_uri": uri_base, - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("delete", - {"method": "DELETE", "data": [], - "expected_uri": uri_base, - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), + ( + "create", + { + "method": "POST", + "data": [], + "expected_uri": uri_base, + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "read", + { + "method": "GET", + "data": [], + "expected_uri": uri_base, + "expected_body": None, + "expected_headers": [], + }, + ), + ( + "update", + { + "method": "PUT", + "data": [], + "expected_uri": uri_base, + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "delete", + { + "method": "DELETE", + "data": [], + "expected_uri": uri_base, + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), # With data, PUT, POST, and DELETE requests have their body and # extra headers prepared by build_multipart_message and # encode_multipart_message. For GET requests, the data is # encoded into the query string, and both the request body and # extra headers are empty. 
- ("create-with-data", - {"method": "POST", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base, - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("read-with-data", - {"method": "GET", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base + "?foo=bar&foo=baz", - "expected_body": None, - "expected_headers": []}), - ("update-with-data", - {"method": "PUT", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base, - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("delete-with-data", - {"method": "DELETE", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base, - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ) + ( + "create-with-data", + { + "method": "POST", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base, + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "read-with-data", + { + "method": "GET", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base + "?foo=bar&foo=baz", + "expected_body": None, + "expected_headers": [], + }, + ), + ( + "update-with-data", + { + "method": "PUT", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base, + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "delete-with-data", + { + "method": "DELETE", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base, + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ) # Scenarios for non-ReSTful operations; i.e. with an "op" parameter. scenarios_with_op = ( # Without data, all requests have an empty request body and no extra # headers. The operation is encoded into the query string. 
- ("create", - {"method": "POST", "data": [], - "expected_uri": uri_base + "?op=something", - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("read", - {"method": "GET", "data": [], - "expected_uri": uri_base + "?op=something", - "expected_body": None, - "expected_headers": []}), - ("update", - {"method": "PUT", "data": [], - "expected_uri": uri_base + "?op=something", - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("delete", - {"method": "DELETE", "data": [], - "expected_uri": uri_base + "?op=something", - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), + ( + "create", + { + "method": "POST", + "data": [], + "expected_uri": uri_base + "?op=something", + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "read", + { + "method": "GET", + "data": [], + "expected_uri": uri_base + "?op=something", + "expected_body": None, + "expected_headers": [], + }, + ), + ( + "update", + { + "method": "PUT", + "data": [], + "expected_uri": uri_base + "?op=something", + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "delete", + { + "method": "DELETE", + "data": [], + "expected_uri": uri_base + "?op=something", + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), # With data, PUT, POST, and DELETE requests have their body and # extra headers prepared by build_multipart_message and # encode_multipart_message. For GET requests, the data is # encoded into the query string, and both the request body and # extra headers are empty. The operation is encoded into the # query string. 
- ("create-with-data", - {"method": "POST", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base + "?op=something", - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("read-with-data", - {"method": "GET", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base + "?op=something&foo=bar&foo=baz", - "expected_body": None, - "expected_headers": []}), - ("update-with-data", - {"method": "PUT", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base + "?op=something", - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ("delete-with-data", - {"method": "DELETE", "data": [("foo", "bar"), ("foo", "baz")], - "expected_uri": uri_base + "?op=something", - "expected_body": sentinel.body, - "expected_headers": sentinel.headers}), - ) + ( + "create-with-data", + { + "method": "POST", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base + "?op=something", + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "read-with-data", + { + "method": "GET", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base + "?op=something&foo=bar&foo=baz", + "expected_body": None, + "expected_headers": [], + }, + ), + ( + "update-with-data", + { + "method": "PUT", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base + "?op=something", + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ( + "delete-with-data", + { + "method": "DELETE", + "data": [("foo", "bar"), ("foo", "baz")], + "expected_uri": uri_base + "?op=something", + "expected_body": sentinel.body, + "expected_headers": sentinel.headers, + }, + ), + ) scenarios_without_op = tuple( ("%s-without-op" % name, dict(scenario, op=None)) - for name, scenario in scenarios_without_op) + for name, scenario in scenarios_without_op + ) scenarios_with_op = tuple( ("%s-with-op" % name, dict(scenario, op="something")) - for name, scenario in 
scenarios_with_op) + for name, scenario in scenarios_with_op + ) scenarios = scenarios_without_op + scenarios_with_op @@ -207,13 +290,13 @@ def test_prepare_payload(self): # The payload returned is a 3-tuple of (uri, body, headers). Pass # `data` as an iterator to ensure that it works with non-sized types. payload = utils.prepare_payload( - op=self.op, method=self.method, - uri=self.uri_base, data=iter(self.data)) + op=self.op, method=self.method, uri=self.uri_base, data=iter(self.data) + ) expected = ( Equals(self.expected_uri), Equals(self.expected_body), Equals(self.expected_headers), - ) + ) self.assertThat(payload, MatchesListwise(expected)) # encode_multipart_message, when called, is passed the data # unadulterated. @@ -234,7 +317,8 @@ def test_files_are_included(self): # open file handle. data = [(parameter, partial(filepath.open, "rb"))] uri, body, headers = utils.prepare_payload( - op=None, method="POST", uri="http://localhost", data=data) + op=None, method="POST", uri="http://localhost", data=data + ) expected_body_template = """\ --... 
@@ -247,7 +331,10 @@ def test_files_are_included(self): --...-- """ expected_body = expected_body_template % ( - parameter, parameter, base64.b64encode(contents).decode("ascii")) + parameter, + parameter, + base64.b64encode(contents).decode("ascii"), + ) self.assertDocTestMatches(expected_body, body.decode("ascii")) @@ -256,35 +343,33 @@ class TestDocstringParsing(TestCase): """Tests for docstring parsing with `parse_docstring`.""" def test_basic(self): - self.assertEqual( - ("Title", "Body"), - utils.parse_docstring("Title\n\nBody")) + self.assertEqual(("Title", "Body"), utils.parse_docstring("Title\n\nBody")) self.assertEqual( ("A longer title", "A longer body"), - utils.parse_docstring("A longer title\n\nA longer body")) + utils.parse_docstring("A longer title\n\nA longer body"), + ) def test_returns_named_tuple(self): self.assertThat( utils.parse_docstring("Title\n\nBody"), - MatchesStructure.byEquality(title="Title", body="Body")) + MatchesStructure.byEquality(title="Title", body="Body"), + ) def test_no_body(self): # parse_docstring returns an empty string when there's no body. - self.assertEqual( - ("Title", ""), - utils.parse_docstring("Title\n\n")) - self.assertEqual( - ("Title", ""), - utils.parse_docstring("Title")) + self.assertEqual(("Title", ""), utils.parse_docstring("Title\n\n")) + self.assertEqual(("Title", ""), utils.parse_docstring("Title")) def test_unwrapping(self): # parse_docstring unwraps the title paragraph, and dedents the body # paragraphs. 
self.assertEqual( - ("Title over two lines", - "Paragraph over\ntwo lines\n\n" - "Another paragraph\nover two lines"), - utils.parse_docstring(""" + ( + "Title over two lines", + "Paragraph over\ntwo lines\n\n" "Another paragraph\nover two lines", + ), + utils.parse_docstring( + """ Title over two lines @@ -293,7 +378,9 @@ def test_unwrapping(self): Another paragraph over two lines - """)) + """ + ), + ) def test_gets_docstring_from_function(self): # parse_docstring can extract the docstring when the argument passed @@ -303,38 +390,37 @@ def example(): Body. """ - self.assertEqual( - ("Title.", "Body."), - utils.parse_docstring(example)) + + self.assertEqual(("Title.", "Body."), utils.parse_docstring(example)) def test_normalises_whitespace(self): # parse_docstring can parse CRLF/CR/LF text, but always emits LF (\n, # new-line) separated text. - self.assertEqual( - ("long title", ""), - utils.parse_docstring("long\r\ntitle")) + self.assertEqual(("long title", ""), utils.parse_docstring("long\r\ntitle")) self.assertEqual( ("title", "body1\n\nbody2"), - utils.parse_docstring("title\n\nbody1\r\rbody2")) + utils.parse_docstring("title\n\nbody1\r\rbody2"), + ) class TestFunctions(TestCase): """Tests for miscellaneous functions in `maas.client.utils`.""" def test_api_url(self): - transformations = list({ - "http://example.com/": "http://example.com/api/2.0/", - "http://example.com/foo": "http://example.com/foo/api/2.0/", - "http://example.com/foo/": "http://example.com/foo/api/2.0/", - "http://example.com/api/7.9": "http://example.com/api/7.9/", - 
"http://example.com/api/7.9/": "http://example.com/api/7.9/", - }.items()) + transformations = list( + { + "http://example.com/": "http://example.com/api/2.0/", + "http://example.com/foo": "http://example.com/foo/api/2.0/", + "http://example.com/foo/": "http://example.com/foo/api/2.0/", + "http://example.com/api/7.9": "http://example.com/api/7.9/", + "http://example.com/api/7.9/": "http://example.com/api/7.9/", + }.items() + ) urls = [url for url, url_out in transformations] urls_out = [url_out for url, url_out in transformations] expected = [ - AfterPreprocessing(utils.api_url, Equals(url_out)) - for url_out in urls_out - ] + AfterPreprocessing(utils.api_url, Equals(url_out)) for url_out in urls_out + ] self.assertThat(urls, MatchesListwise(expected)) def test_coalesce(self): @@ -348,19 +434,23 @@ def test_coalesce(self): class TestRetries(TestCase): - def assertRetry( - self, clock, observed, expected_elapsed, expected_remaining, - expected_wait): + self, clock, observed, expected_elapsed, expected_remaining, expected_wait + ): """Assert that the retry tuple matches the given expectations. Retry tuples are those returned by `retries`. 
""" - self.assertThat(observed, MatchesListwise([ - Equals(expected_elapsed), # elapsed - Equals(expected_remaining), # remaining - Equals(expected_wait), # wait - ])) + self.assertThat( + observed, + MatchesListwise( + [ + Equals(expected_elapsed), # elapsed + Equals(expected_remaining), # remaining + Equals(expected_wait), # wait + ] + ), + ) def test_yields_elapsed_remaining_and_wait(self): # Take control of time. @@ -459,9 +549,5 @@ class TestRemoveNone(TestCase): """Test `remove_None`.""" def test_removes_all_None_values(self): - data = { - 'None': None, - 'another_None': None, - 'keep': 'value' - } - self.assertEquals({'keep': 'value'}, utils.remove_None(data)) + data = {"None": None, "another_None": None, "keep": "value"} + self.assertEquals({"keep": "value"}, utils.remove_None(data)) diff --git a/maas/client/utils/tests/test_async.py b/maas/client/utils/tests/test_async.py index 284c8672..6bf464ee 100644 --- a/maas/client/utils/tests/test_async.py +++ b/maas/client/utils/tests/test_async.py @@ -17,13 +17,9 @@ import asyncio from inspect import isawaitable -from testtools.matchers import ( - Equals, - Is, - MatchesPredicate, -) +from testtools.matchers import Equals, Is, MatchesPredicate -from .. import async +from .. 
import maas_async from ...testing import TestCase @@ -35,30 +31,28 @@ class TestAsynchronousWrapper(TestCase): def test_returns_plain_result_unaltered_when_loop_not_running(self): token = object() - func = async.asynchronous(lambda: token) + func = maas_async.asynchronous(lambda: token) self.assertThat(func(), Is(token)) def test_returns_plain_result_unaltered_when_loop_running(self): token = object() - func = async.asynchronous(lambda: token) + func = maas_async.asynchronous(lambda: token) async def within_event_loop(): loop = asyncio.get_event_loop() self.assertTrue(loop.is_running()) return func() - self.assertThat( - self.loop.run_until_complete(within_event_loop()), - Is(token)) + self.assertThat(self.loop.run_until_complete(within_event_loop()), Is(token)) def test_blocks_on_awaitable_result_when_loop_not_running(self): token = asyncio.sleep(0.0) - func = async.asynchronous(lambda: token) + func = maas_async.asynchronous(lambda: token) self.assertThat(func(), Is(None)) def test_returns_awaitable_result_unaltered_when_loop_running(self): token = asyncio.sleep(0.0) - func = async.asynchronous(lambda: token) + func = maas_async.asynchronous(lambda: token) async def within_event_loop(): loop = asyncio.get_event_loop() @@ -78,7 +72,7 @@ class TestAsynchronousType(TestCase): def test_callable_attributes_are_wrapped(self): # `Asynchronous` groks class- and static-methods. - class Class(metaclass=async.Asynchronous): + class Class(metaclass=maas_async.Asynchronous): attribute = 123 diff --git a/maas/client/utils/tests/test_auth.py b/maas/client/utils/tests/test_auth.py index 73a6b75b..dc6b2945 100644 --- a/maas/client/utils/tests/test_auth.py +++ b/maas/client/utils/tests/test_auth.py @@ -15,10 +15,7 @@ """Tests for `maas.client.utils.auth`.""" import sys -from unittest.mock import ( - ANY, - sentinel, -) +from unittest.mock import ANY, sentinel from .. 
import auth from ...testing import TestCase @@ -26,7 +23,6 @@ class TestAuth(TestCase): - def test_try_getpass(self): getpass = self.patch(auth, "getpass") getpass.return_value = sentinel.credentials diff --git a/maas/client/utils/tests/test_creds.py b/maas/client/utils/tests/test_creds.py index bf3965ab..ea833016 100644 --- a/maas/client/utils/tests/test_creds.py +++ b/maas/client/utils/tests/test_creds.py @@ -25,7 +25,7 @@ class TestCredentials(TestCase): def test_str_form_is_colon_separated_triple(self): creds = Credentials("foo", "bar", "baz") - self.assertEqual(':'.join(creds), str(creds)) + self.assertEqual(":".join(creds), str(creds)) def test_parse_reads_a_colon_separated_triple(self): creds = Credentials.parse("foo:bar:baz") diff --git a/maas/client/utils/tests/test_diff.py b/maas/client/utils/tests/test_diff.py index 27857636..8b30c01e 100644 --- a/maas/client/utils/tests/test_diff.py +++ b/maas/client/utils/tests/test_diff.py @@ -24,42 +24,29 @@ class TestCalculateDictDiff(TestCase): """Test `calculate_dict_diff`.""" def test_calcs_no_difference(self): - orig_data = { - 'key1': 'value1', - 'key2': 'value2', - } + orig_data = {"key1": "value1", "key2": "value2"} new_data = copy.deepcopy(orig_data) self.assertEquals({}, calculate_dict_diff(orig_data, new_data)) def test_calcs_changed_value(self): - orig_data = { - 'key1': 'value1', - 'key2': 'value2', - } + orig_data = {"key1": "value1", "key2": "value2"} new_data = copy.deepcopy(orig_data) - new_data['key2'] = 'new_value' + new_data["key2"] = "new_value" self.assertEquals( - {'key2': 'new_value'}, calculate_dict_diff(orig_data, new_data)) + {"key2": "new_value"}, calculate_dict_diff(orig_data, new_data) + ) def test_calcs_deleted_value(self): - orig_data = { - 'key1': 'value1', - 'key2': 'value2', - } + orig_data = {"key1": "value1", "key2": "value2"} new_data = copy.deepcopy(orig_data) - del new_data['key2'] - self.assertEquals( - {'key2': ''}, calculate_dict_diff(orig_data, new_data)) + del 
new_data["key2"] + self.assertEquals({"key2": ""}, calculate_dict_diff(orig_data, new_data)) def test_calcs_changes_and_deleted(self): - orig_data = { - 'key1': 'value1', - 'key2': 'value2', - } + orig_data = {"key1": "value1", "key2": "value2"} new_data = copy.deepcopy(orig_data) - new_data['key1'] = 'new_value' - del new_data['key2'] - self.assertEquals({ - 'key1': 'new_value', - 'key2': '', - }, calculate_dict_diff(orig_data, new_data)) + new_data["key1"] = "new_value" + del new_data["key2"] + self.assertEquals( + {"key1": "new_value", "key2": ""}, calculate_dict_diff(orig_data, new_data) + ) diff --git a/maas/client/utils/tests/test_multipart.py b/maas/client/utils/tests/test_multipart.py index 66af9edd..9aa0699b 100644 --- a/maas/client/utils/tests/test_multipart.py +++ b/maas/client/utils/tests/test_multipart.py @@ -21,19 +21,10 @@ from django.core.files.uploadhandler import MemoryFileUploadHandler from django.http.multipartparser import MultiPartParser from django.utils.datastructures import MultiValueDict -from testtools.matchers import ( - EndsWith, - StartsWith, -) +from testtools.matchers import EndsWith, StartsWith -from ...testing import ( - make_string, - TestCase, -) -from ..multipart import ( - encode_multipart_data, - get_content_type, -) +from ...testing import make_string, TestCase +from ..multipart import encode_multipart_data, get_content_type # Django, sigh, needs this. settings.configure() @@ -42,7 +33,8 @@ ahem_django_ahem = ( "If the mismatch appears to be because the parsed values " "are base64 encoded, then check you're using a >=1.4 release " - "of Django.") + "of Django." 
+) def parse_headers_and_body_with_django(headers, body): @@ -70,83 +62,60 @@ def parse_headers_and_body_with_django(headers, body): "CONTENT_LENGTH": headers["Content-Length"], } parser = MultiPartParser( - META=meta, input_data=BytesIO(body), - upload_handlers=[handler]) + META=meta, input_data=BytesIO(body), upload_handlers=[handler] + ) return parser.parse() class TestMultiPart(TestCase): - def test_get_content_type_guesses_type(self): - guess = get_content_type('text.txt') - self.assertEqual('text/plain', guess) + guess = get_content_type("text.txt") + self.assertEqual("text/plain", guess) self.assertIsInstance(guess, str) def test_encode_multipart_data_produces_bytes(self): - data = {make_string(): make_string().encode('ascii')} - files = {make_string(): BytesIO(make_string().encode('ascii'))} + data = {make_string(): make_string().encode("ascii")} + files = {make_string(): BytesIO(make_string().encode("ascii"))} body, headers = encode_multipart_data(data, files) self.assertIsInstance(body, bytes) def test_encode_multipart_data_closes_with_closing_boundary_line(self): - data = {'foo': make_string().encode('ascii')} - files = {'bar': BytesIO(make_string().encode('ascii'))} + data = {"foo": make_string().encode("ascii")} + files = {"bar": BytesIO(make_string().encode("ascii"))} body, headers = encode_multipart_data(data, files) - self.assertThat(body, EndsWith(b'--')) + self.assertThat(body, EndsWith(b"--")) def test_encode_multipart_data(self): # The encode_multipart_data() function should take a list of # parameters and files and encode them into a MIME # multipart/form-data suitable for posting to the MAAS server. 
params = { - "op": { - "value": "add", - "output": "add", - }, - "foo": { - "value": "bar\u1234", - "output": "bar\u1234", - }, - "none": { - "value": None, - "output": "", - }, - "true": { - "value": True, - "output": "true", - }, - "false": { - "value": False, - "output": "false", - }, - "int": { - "value": 1, - "output": "1", - }, - "bytes": { - "value": b"bytes", - "output": "bytes", - }, + "op": {"value": "add", "output": "add"}, + "foo": {"value": "bar\u1234", "output": "bar\u1234"}, + "none": {"value": None, "output": ""}, + "true": {"value": True, "output": "true"}, + "false": {"value": False, "output": "false"}, + "int": {"value": 1, "output": "1"}, + "bytes": {"value": b"bytes", "output": "bytes"}, } random_data = urandom(32) files = {"baz": BytesIO(random_data)} - body, headers = encode_multipart_data({ - key: value["value"] - for key, value in params.items() - }, files) + body, headers = encode_multipart_data( + {key: value["value"] for key, value in params.items()}, files + ) self.assertEqual("%s" % len(body), headers["Content-Length"]) self.assertThat( - headers["Content-Type"], - StartsWith("multipart/form-data; boundary=")) + headers["Content-Type"], StartsWith("multipart/form-data; boundary=") + ) # Round-trip through Django's multipart code. post, files = parse_headers_and_body_with_django(headers, body) self.assertEqual( - {name: [value["output"]] for name, value in params.items()}, post, - ahem_django_ahem) + {name: [value["output"]] for name, value in params.items()}, + post, + ahem_django_ahem, + ) self.assertSetEqual({"baz"}, set(files)) - self.assertEqual( - random_data, files["baz"].read(), - ahem_django_ahem) + self.assertEqual(random_data, files["baz"].read(), ahem_django_ahem) def test_encode_multipart_data_multiple_params(self): # Sequences of parameters and files passed to @@ -154,11 +123,7 @@ def test_encode_multipart_data_multiple_params(self): # multiple parameters and/or files. 
See `make_payloads` to # understand how it processes different types of parameter # values. - params_in = [ - ("one", "ABC"), - ("one", "XYZ"), - ("two", ["DEF", "UVW"]), - ] + params_in = [("one", "ABC"), ("one", "XYZ"), ("two", ["DEF", "UVW"])] files = [ BytesIO(b"f1"), self.makeFile(contents=b"f2").open("rb"), @@ -174,32 +139,23 @@ def test_encode_multipart_data_multiple_params(self): body, headers = encode_multipart_data(params_in, files_in) self.assertEqual("%s" % len(body), headers["Content-Length"]) self.assertThat( - headers["Content-Type"], - StartsWith("multipart/form-data; boundary=")) + headers["Content-Type"], StartsWith("multipart/form-data; boundary=") + ) # Round-trip through Django's multipart code. - params_out, files_out = ( - parse_headers_and_body_with_django(headers, body)) + params_out, files_out = parse_headers_and_body_with_django(headers, body) params_out_expected = MultiValueDict() params_out_expected.appendlist("one", "ABC") params_out_expected.appendlist("one", "XYZ") params_out_expected.appendlist("two", "DEF") params_out_expected.appendlist("two", "UVW") - self.assertEqual( - params_out_expected, params_out, - ahem_django_ahem) + self.assertEqual(params_out_expected, params_out, ahem_django_ahem) files_expected = {"f-one": b"f1", "f-two": b"f2", "f-three": b"f3"} files_observed = {name: buf.read() for name, buf in files_out.items()} - self.assertEqual( - files_expected, files_observed, - ahem_django_ahem) + self.assertEqual(files_expected, files_observed, ahem_django_ahem) def test_encode_multipart_data_list_params(self): - params_in = [ - ("one", ["ABC", "XYZ"]), - ("one", "UVW"), - ] + params_in = [("one", ["ABC", "XYZ"]), ("one", "UVW")] body, headers = encode_multipart_data(params_in, []) - params_out, files_out = ( - parse_headers_and_body_with_django(headers, body)) - self.assertEqual({'one': ['ABC', 'XYZ', 'UVW']}, params_out) + params_out, files_out = parse_headers_and_body_with_django(headers, body) + self.assertEqual({"one": 
["ABC", "XYZ", "UVW"]}, params_out) self.assertSetEqual(set(), set(files_out)) diff --git a/maas/client/utils/tests/test_profiles.py b/maas/client/utils/tests/test_profiles.py index 002421fd..835dd1ee 100644 --- a/maas/client/utils/tests/test_profiles.py +++ b/maas/client/utils/tests/test_profiles.py @@ -18,23 +18,11 @@ from pathlib import Path import sqlite3 -from testtools.matchers import ( - Equals, - Is, - Not, -) +from testtools.matchers import Equals, Is, Not from twisted.python.filepath import FilePath -from .. import profiles -from ...testing import ( - make_name_without_spaces, - TestCase, -) -from ..profiles import ( - Profile, - ProfileNotFound, - ProfileStore, -) +from ...testing import make_name_without_spaces, TestCase +from ..profiles import Profile, ProfileNotFound, ProfileStore from ..testing import make_Credentials @@ -42,13 +30,15 @@ def make_profile(name=None): if name is None: name = make_name_without_spaces("name") return Profile( - name=name, url="http://example.com:5240/", - credentials=make_Credentials(), description={"resources": []}, - something=make_name_without_spaces("something")) + name=name, + url="http://example.com:5240/", + credentials=make_Credentials(), + description={"resources": []}, + something=make_name_without_spaces("something"), + ) class TestProfile(TestCase): - def test__instances_are_immutable(self): profile = make_profile() self.assertRaises(AttributeError, setattr, profile, "name", "foo") @@ -67,36 +57,46 @@ def test__replace_returns_a_new_profile(self): def test__replace_returns_a_new_profile_with_modifications(self): profile1 = make_profile() - profile2 = profile1.replace( - name=profile1.name + "basil", hello="world") + profile2 = profile1.replace(name=profile1.name + "basil", hello="world") self.assertThat(profile2.name, Equals(profile1.name + "basil")) - self.assertThat(profile2.other, Equals( - 
dict(profile1.other, hello="world"))) + self.assertThat(profile2.other, Equals(dict(profile1.other, hello="world"))) def test__dump_returns_dict_with_all_state(self): profile = make_profile() - self.assertThat(profile.dump(), Equals({ - "name": profile.name, - "url": profile.url, - "credentials": profile.credentials, - "description": profile.description, - "something": profile.other["something"], - })) + self.assertThat( + profile.dump(), + Equals( + { + "name": profile.name, + "url": profile.url, + "credentials": profile.credentials, + "description": profile.description, + "something": profile.other["something"], + } + ), + ) def test__representation(self): profile = make_profile() - self.assertThat(repr(profile), Equals( - "".format(profile))) + self.assertThat( + repr(profile), Equals("".format(profile)) + ) def test__representation_of_anonymous_profile(self): profile = make_profile().replace(credentials=None) - self.assertThat(repr(profile), Equals( - "".format(profile))) + self.assertThat( + repr(profile), + Equals("".format(profile)), + ) class TestProfileStore(TestCase): """Tests for `ProfileStore`.""" + def setUp(self): + super(TestProfileStore, self).setUp() + self.null_profile = Path("/dev/null") + def test_init(self): database = sqlite3.connect(":memory:") config = ProfileStore(database) @@ -105,8 +105,10 @@ def test_init(self): config.database.execute( "SELECT COUNT(*) FROM sqlite_master" " WHERE type = 'table'" - " AND name = 'profiles'").fetchone(), - (1,)) + " AND name = 'profiles'" + ).fetchone(), + (1,), + ) def test_profiles_pristine(self): # A pristine configuration has no profiles. 
@@ -164,17 +166,14 @@ def test_open_and_close(self): self.assertIsInstance(config, contextlib._GeneratorContextManager) with config as config: self.assertIsInstance(config, ProfileStore) - self.assertEqual( - (1,), config.database.execute("SELECT 1").fetchone()) - self.assertRaises( - sqlite3.ProgrammingError, config.database.execute, - "SELECT 1") + self.assertEqual((1,), config.database.execute("SELECT 1").fetchone()) + self.assertRaises(sqlite3.ProgrammingError, config.database.execute, "SELECT 1") def test_open_permissions_new_database(self): # ProfileStore.open() applies restrictive file permissions to newly # created configuration databases. config_file = self.makeDir().joinpath("config") - with ProfileStore.open(config_file): + with ProfileStore.open(config_file, self.null_profile): perms = FilePath(str(config_file)).getPermissions() self.assertEqual("rw-------", perms.shorthand()) @@ -184,7 +183,7 @@ def test_open_permissions_existing_database(self): config_file = self.makeDir().joinpath("config") config_file.touch() config_file.chmod(0o644) # u=rw,go=r - with ProfileStore.open(config_file): + with ProfileStore.open(config_file, self.null_profile): perms = FilePath(str(config_file)).getPermissions() self.assertEqual("rw-r--r--", perms.shorthand()) @@ -193,28 +192,17 @@ def test_open_does_one_time_migration(self): dbpath_old = home.joinpath(".maascli.db") dbpath_new = home.joinpath(".maas.db") - # Path.expanduser() is used by ProfileStore.open(). We expect the - # paths to be expanded to be one of those below. - def expanduser(path): - if path == Path("~/.maas.db"): - return dbpath_new - if path == Path("~/.maascli.db"): - return dbpath_old - raise ValueError(path) - - self.patch(profiles.Path, "expanduser", expanduser) - # A profile that will be migrated. profile = make_profile() # Populate the old database with a profile. We're using the new # ProfileStore but that's okay; the schemas are compatible. 
- with ProfileStore.open(dbpath_old) as config_old: + with ProfileStore.open(dbpath_old, self.null_profile) as config_old: config_old.save(profile) # Immediately as we open the new database, profiles from the old # database are migrated. - with ProfileStore.open(dbpath_new) as config_new: + with ProfileStore.open(dbpath_new, dbpath_old) as config_new: self.assertEqual({profile.name}, set(config_new)) profile_migrated = config_new.load(profile.name) self.assertEqual(profile, profile_migrated) @@ -223,11 +211,11 @@ def expanduser(path): # After reopening the new database we see the migrated profile that we # deleted has stayed deleted; it has not been migrated a second time. - with ProfileStore.open(dbpath_new) as config_new: + with ProfileStore.open(dbpath_new, self.null_profile) as config_new: self.assertRaises(ProfileNotFound, config_new.load, profile.name) # It is still present and correct in the old database. - with ProfileStore.open(dbpath_old) as config_old: + with ProfileStore.open(dbpath_old, self.null_profile) as config_old: self.assertEqual(profile, config_old.load(profile.name)) diff --git a/maas/client/utils/types.py b/maas/client/utils/types.py index 235c2240..4f312006 100644 --- a/maas/client/utils/types.py +++ b/maas/client/utils/types.py @@ -14,17 +14,9 @@ """Miscellaneous types.""" -__all__ = [ - "JSONArray", - "JSONObject", - "JSONValue", -] +__all__ = ["JSONArray", "JSONObject", "JSONValue"] -from typing import ( - Dict, - Sequence, - Union, -) +from typing import Dict, Sequence, Union # # Types that can be represented in JSON. 
diff --git a/maas/client/viscera/__init__.py b/maas/client/viscera/__init__.py index 288678bc..40edd6fc 100644 --- a/maas/client/viscera/__init__.py +++ b/maas/client/viscera/__init__.py @@ -19,34 +19,21 @@ "OriginBase", ] -from collections import ( - defaultdict, - Iterable, - Mapping, - Sequence, -) -from copy import ( - copy, - deepcopy, -) +from collections.abc import Iterable, Mapping, Sequence +from collections import defaultdict +from copy import copy from datetime import datetime from functools import wraps from importlib import import_module -from itertools import ( - chain, - starmap, -) +from itertools import chain, starmap from types import MethodType import pytz from .. import bones from ..errors import ObjectNotLoaded -from ..utils import ( - get_all_subclasses, - vars_class, -) -from ..utils.async import Asynchronous +from ..utils import get_all_subclasses, vars_class +from ..utils.maas_async import Asynchronous undefined = object() @@ -64,8 +51,8 @@ def __call__(self, *args, **kwargs): raise RuntimeError("%s has been disabled" % (self.name,)) else: raise RuntimeError( - "%s has been disabled; use %s instead" % ( - self.name, self.alternative)) + "%s has been disabled; use %s instead" % (self.name, self.alternative) + ) def dir_class(cls): @@ -136,7 +123,7 @@ def __init__(self, name=None): def __get__(self, instance, owner): if self.name is None: - name = owner.__name__.split('.')[0] + name = owner.__name__.split(".")[0] return getattr(owner._origin, name.rstrip("s")) else: return getattr(owner._origin, self.name) @@ -146,7 +133,6 @@ def __set__(self, instance, value): class ObjectType(Asynchronous, metaclass=Asynchronous): - def __dir__(cls): return dir_class(cls) @@ -154,16 +140,18 @@ def __new__(cls, name, bases, attrs): attrs.setdefault("__slots__", ()) return super(ObjectType, cls).__new__(cls, name, bases, attrs) - def bind(cls, origin, handler, *, name=None): + def bind(cls, origin, handler, handlers, *, name=None): """Bind this object to the 
given origin and handler. :param origin: An instance of `Origin`. :param handler: An instance of `bones.HandlerAPI`. + :param handlers: All handlers from `bones`. :return: A subclass of this class. """ name = cls.__name__ if name is None else name attrs = { - "_origin": origin, "_handler": handler, + "_origin": origin, + "_handler": handler, "__module__": "origin", # Could do better? } return type(name, (cls,), attrs) @@ -215,15 +203,17 @@ def get_pk_descriptors(cls): } if unique_pk_fields: raise AttributeError( - "more than one field is marked as unique primary key: %s" % ( - ', '.join(sorted(pk_fields)))) - pk_descriptors = tuple(sorted(( - (name, descriptor) - for name, descriptor in pk_fields.items() - ), key=lambda item: item[1].pk)) + "more than one field is marked as unique primary key: %s" + % (", ".join(sorted(pk_fields))) + ) + pk_descriptors = tuple( + sorted( + ((name, descriptor) for name, descriptor in pk_fields.items()), + key=lambda item: item[1].pk, + ) + ) alt_pk_descriptors = tuple( - alt_pk_fields[idx] - for idx, (name, descriptor) in enumerate(pk_descriptors) + alt_pk_fields[idx] for idx, (name, descriptor) in enumerate(pk_descriptors) ) return pk_descriptors, alt_pk_descriptors else: @@ -241,27 +231,27 @@ def set_alt_pk_value(alt_descriptors, obj_data, data): class Object(ObjectBasics, metaclass=ObjectType): """An object in a MAAS installation.""" - __slots__ = ( - "__weakref__", "_data", "_orig_data", "_changed_data", "_loaded") + __slots__ = ("__weakref__", "_data", "_orig_data", "_changed_data", "_loaded") def __init__(self, data, local_data=None): super(Object, self).__init__() + self._data = data self._changed_data = {} self._loaded = False - if isinstance(data, Mapping) and not data.get('__incomplete__', False): - self._data = data + if isinstance(data, Mapping) and not data.get("__incomplete__", False): + self._reset(data) self._loaded = True else: descriptors, alt_descriptors = get_pk_descriptors(type(self)) if len(descriptors) == 1: 
if isinstance(data, Mapping): - self._data = {} + new_data = {} try: - self._data[descriptors[0][1].name] = ( - data[descriptors[0][1].name]) + new_data[descriptors[0][1].name] = data[descriptors[0][1].name] except KeyError: found_name = set_alt_pk_value( - alt_descriptors[0], self._data, data) + alt_descriptors[0], new_data, data + ) if found_name: # Validate that the set data is correct and # can be converted to the python value. @@ -272,13 +262,9 @@ def __init__(self, data, local_data=None): # Validate that the set data is correct and # can be converted to the python value. getattr(self, descriptors[0][0]) - # Reset self._data so _orig_data and _changed_data is - # correct. - self._data = self._data + self._reset(new_data) else: - self._data = { - descriptors[0][1].name: data - } + self._reset({descriptors[0][1].name: data}) # Validate that the primary key is correct and # can be converted to the python value. getattr(self, descriptors[0][0]) @@ -291,14 +277,15 @@ def __init__(self, data, local_data=None): obj_data[descriptor.name] = data[descriptor.name] except KeyError: found_name = set_alt_pk_value( - alt_descriptors[idx], obj_data, data) + alt_descriptors[idx], obj_data, data + ) if found_name: found_names.append(found_name) else: raise else: found_names.append(name) - self._data = obj_data + self._reset(obj_data) # Validate that all set data is correct and can be # converted to the python value. for name in found_names: @@ -307,32 +294,46 @@ def __init__(self, data, local_data=None): obj_data = {} for idx, (name, descriptor) in enumerate(descriptors): obj_data[descriptor.name] = data[idx] - self._data = obj_data + self._reset(obj_data) for name, _ in descriptors: # Validate that the primary key is correct and # can be converted to the python value. 
getattr(self, name) else: raise TypeError( - "data must be a mapping or a sequence, not %s" % ( - type(data).__name__)) + "data must be a mapping or a sequence, not %s" + % (type(data).__name__) + ) else: if not isinstance(data, Mapping): raise TypeError( - "data must be a mapping, not %s" - % type(data).__name__) + "data must be a mapping, not %s" % type(data).__name__ + ) else: raise ValueError( - "data cannot be incomplete without any primary keys " - "defined") - self._orig_data = deepcopy(self._data) + "data cannot be incomplete without any primary keys " "defined" + ) if local_data is not None: if isinstance(local_data, Mapping): self._data.update(local_data) else: raise TypeError( - "local_data must be a mapping, not %s" - % type(data).__name__) + "local_data must be a mapping, not %s" % type(data).__name__ + ) + + def _reset(self, data): + """Reset the object to look pristine with the given data. + + All tracked changes will be discarded, and the object will be + ready to track new changes. + """ + self._data = data + # Make a shallow copy of each item in the original data so that + # we can keep track of the changes. A shallow copy is enough, + # since we only care about changes that are directly related to + # this object. 
+ self._orig_data = {key: copy(value) for key, value in data.items()} + self._changed_data = {} def __getattribute__(self, name): """Prevent access to fields that are not defined as primary keys on @@ -343,29 +344,18 @@ def __getattribute__(self, name): if isinstance(descriptor, ObjectField) } if name in fields: - if self.loaded: + if super().__getattribute__("_loaded"): return super(Object, self).__getattribute__(name) elif is_pk_descriptor(fields[name], include_alt=True): return super(Object, self).__getattribute__(name) else: raise ObjectNotLoaded( - "cannot access attribute '%s' of object '%s'" % ( - name, type(self).__name__)) + "cannot access attribute '%s' of object '%s'" + % (name, type(self).__name__) + ) else: return super(Object, self).__getattribute__(name) - def __setattr__(self, name, value): - """Handle `_data` being set directly. - - When `_data` is set directly we update the `_orig_data` and - `_changed_data`. - """ - ret = super(Object, self).__setattr__(name, value) - if name == '_data': - self._orig_data = deepcopy(value) - self._changed_data = {} - return ret - def __eq__(self, other): """Strict equality check. 
@@ -394,8 +384,10 @@ def __repr__(self, *, name=None, fields=None): fields = [] if fields is None: fields = sorted( - name for name, value in vars_class(type(self)).items() - if isinstance(value, ObjectField)) + name + for name, value in vars_class(type(self)).items() + if isinstance(value, ObjectField) + ) else: fields = sorted(fields) values = (getattr(self, name) for name in fields) @@ -408,9 +400,9 @@ def __repr__(self, *, name=None, fields=None): def __hash__(self): name = str(self.__class__.__name__) - if hasattr(self, 'id'): + if hasattr(self, "id"): return hash((name, self.id)) - if hasattr(self, 'system_id'): + if hasattr(self, "system_id"): return hash((name, self.system_id)) return None @@ -426,7 +418,7 @@ def loaded(self): async def refresh(self): """Refresh the object from MAAS.""" cls = type(self) - if hasattr(cls, 'read'): + if hasattr(cls, "read"): descriptors, alt_descriptors = get_pk_descriptors(cls) if len(descriptors) == 1: try: @@ -457,32 +449,33 @@ async def refresh(self): obj = await cls.read(*args) else: raise AttributeError( - "unable to perform 'refresh' no primary key " - "fields defined.") + "unable to perform 'refresh' no primary key " "fields defined." + ) if type(obj) is cls: - self._data = obj._data + self._reset(obj._data) self._loaded = True else: raise TypeError( - "result of '%s.read' must be '%s', not '%s'" % ( - cls.__name__, cls.__name__, type(obj).__name__)) + "result of '%s.read' must be '%s', not '%s'" + % (cls.__name__, cls.__name__, type(obj).__name__) + ) else: - raise AttributeError( - "'%s' object doesn't support refresh." % cls.__name__) + raise AttributeError("'%s' object doesn't support refresh." 
% cls.__name__) async def save(self): """Save the object in MAAS.""" if hasattr(self._handler, "update"): if self._changed_data: update_data = dict(self._changed_data) - update_data.update({ - key: self._orig_data[key] - for key in self._handler.params - }) - self._data = await self._handler.update(**update_data) + update_data.update( + {key: self._orig_data[key] for key in self._handler.params} + ) + data = await self._handler.update(**update_data) + self._reset(data) else: raise AttributeError( - "'%s' object doesn't support save." % type(self).__name__) + "'%s' object doesn't support save." % type(self).__name__ + ) def ManagedCreate(super_cls): @@ -498,23 +491,25 @@ def ManagedCreate(super_cls): @wraps(super_cls.create) async def _create(self, *args, **kwargs): cls = type(self) - manager = getattr(cls, '_manager', None) - manager_field = getattr(cls, '_manager_field', None) + manager = getattr(cls, "_manager", None) + manager_field = getattr(cls, "_manager_field", None) if manager is not None and manager_field is not None: args = (manager,) + args new_obj = await super_cls.create(*args, **kwargs) self._items = self._items + [new_obj] - manager._data[manager_field.name] = ( - manager._data[manager_field.name] + - [new_obj._data]) - manager._orig_data[manager_field.name] = ( - manager._orig_data[manager_field.name] + - [new_obj._data]) + manager._data[manager_field.name] = manager._data[manager_field.name] + [ + new_obj._data + ] + manager._orig_data[manager_field.name] = manager._orig_data[ + manager_field.name + ] + [new_obj._data] return new_obj else: raise AttributeError( - 'create is not supported; %s is not a managed set' % ( - super_cls.__name__)) + "create is not supported; %s is not a managed set" + % (super_cls.__name__) + ) + return _create @@ -535,31 +530,24 @@ def Managed(cls, manager, field, items): `ObjectSet`. :param items: The items in the `ObjectSet`. 
""" - attrs = { - "_manager": manager, - "_manager_field": field, - } + attrs = {"_manager": manager, "_manager_field": field} if hasattr(cls, "create"): - attrs['create'] = ManagedCreate(cls) + attrs["create"] = ManagedCreate(cls) cls = type( - "%s.Managed#%s" % ( - cls.__name__, manager.__class__.__name__), (cls,), attrs) + "%s.Managed#%s" % (cls.__name__, manager.__class__.__name__), (cls,), attrs + ) return cls(items) def __init__(self, items): super(ObjectSet, self).__init__() if isinstance(items, (Mapping, str, bytes)): - raise TypeError( - "data must be sequence-like, not %s" - % type(items).__name__) + raise TypeError("data must be sequence-like, not %s" % type(items).__name__) elif isinstance(items, Sequence): self._items = items elif isinstance(items, Iterable): self._items = list(items) else: - raise TypeError( - "data must be sequence-like, not %s" - % type(items).__name__) + raise TypeError("data must be sequence-like, not %s" % type(items).__name__) def __len__(self): """Return the count of items contained herein.""" @@ -609,7 +597,10 @@ def __eq__(self, other): def __repr__(self): return "<%s length=%d items=%r>" % ( - self.__class__.__name__, len(self._items), self._items) + self.__class__.__name__, + len(self._items), + self._items, + ) class ObjectField: @@ -661,21 +652,25 @@ def Checked(cls, name, datum_to_value=None, value_to_datum=None, **other): """ attrs = {} if datum_to_value is not None: + @wraps(datum_to_value) def datum_to_value_method(instance, datum): return datum_to_value(datum) + attrs["datum_to_value"] = staticmethod(datum_to_value_method) if value_to_datum is not None: + @wraps(value_to_datum) def value_to_datum_method(instance, value): return value_to_datum(value) + attrs["value_to_datum"] = staticmethod(value_to_datum_method) cls = type("%s.Checked#%s" % (cls.__name__, name), (cls,), attrs) return cls(name, **other) def __init__( - self, name, *, default=undefined, readonly=False, - pk=False, alt_pk=False): + self, name, *, 
default=undefined, readonly=False, pk=False, alt_pk=False + ): """Create a `ObjectField` with an optional default. :param name: The name of the field. This is the name that's used to @@ -695,22 +690,19 @@ def __init__( self.name = name self.default = default if not isinstance(readonly, bool): - raise TypeError( - 'readonly must be a bool, not %s' % type(readonly).__name__) + raise TypeError("readonly must be a bool, not %s" % type(readonly).__name__) else: self.readonly = readonly if not isinstance(pk, (bool, int)): - raise TypeError( - 'pk must be a bool or an int, not %s' % type(pk).__name__) + raise TypeError("pk must be a bool or an int, not %s" % type(pk).__name__) else: self.pk = pk if self.pk is not False and alt_pk is not False: - raise ValueError( - 'pk and alt_pk cannot be defined on the same field.') + raise ValueError("pk and alt_pk cannot be defined on the same field.") elif not isinstance(alt_pk, (bool, int)): raise TypeError( - 'alt_pk must be a bool or an int, not %s' % ( - type(alt_pk).__name__)) + "alt_pk must be a bool or an int, not %s" % (type(alt_pk).__name__) + ) else: self.alt_pk = alt_pk @@ -786,11 +778,19 @@ def __delete__(self, instance): class ObjectFieldRelated(ObjectField): - def __init__( - self, name, cls, *, - default=undefined, readonly=False, pk=False, alt_pk=False, - reverse=undefined, use_data_setter=False, map_func=None): + self, + name, + cls, + *, + default=undefined, + readonly=False, + pk=False, + alt_pk=False, + reverse=undefined, + use_data_setter=False, + map_func=None + ): """Create a `ObjectFieldRelated` with `cls`. :param name: The name of the field. This is the name that's used to @@ -813,7 +813,8 @@ def __init__( set on the object. 
""" super(ObjectFieldRelated, self).__init__( - name, default=default, readonly=readonly, pk=pk, alt_pk=alt_pk) + name, default=default, readonly=readonly, pk=pk, alt_pk=alt_pk + ) self.reverse = reverse self.use_data_setter = use_data_setter self.map_func = map_func @@ -821,8 +822,7 @@ def __init__( self.map_func = lambda instance, value: value if not isinstance(cls, str): if not issubclass(cls, Object): - raise TypeError( - "%s is not a subclass of Object" % cls) + raise TypeError("%s is not a subclass of Object" % cls) else: self.cls = cls.__name__ else: @@ -875,24 +875,20 @@ def value_to_datum(self, instance, value): if len(descriptors) == 1: return getattr(value, descriptors[0][0]) elif len(descriptors) > 1: - return tuple( - getattr(value, name) - for name, _ in descriptors - ) + return tuple(getattr(value, name) for name, _ in descriptors) else: raise AttributeError( "unable to perform set object no primary key " - "fields defined for %s" % self.cls) + "fields defined for %s" % self.cls + ) else: - raise TypeError( - "must be %s, not %s" % (self.cls, type(value).__name__)) + raise TypeError("must be %s, not %s" % (self.cls, type(value).__name__)) class ObjectFieldRelatedSet(ObjectField): - def __init__( - self, name, cls, *, reverse=undefined, default=undefined, - map_func=None): + self, name, cls, *, reverse=undefined, default=undefined, map_func=None + ): """Create a `ObjectFieldRelatedSet` with `cls`. :param name: The name of the field. 
This is the name that's used to @@ -907,15 +903,15 @@ def __init__( if default is undefined: default = [] super(ObjectFieldRelatedSet, self).__init__( - name, default=default, readonly=True) + name, default=default, readonly=True + ) self.reverse = reverse self.map_func = map_func if self.map_func is None: self.map_func = lambda instance, value: value if not isinstance(cls, str): if not issubclass(cls, ObjectSet): - raise TypeError( - "%s is not a subclass of ObjectSet" % cls) + raise TypeError("%s is not a subclass of ObjectSet" % cls) else: self.cls = cls.__name__ else: @@ -934,8 +930,7 @@ def datum_to_value(self, instance, datum): if datum is None: return [] if not isinstance(datum, Sequence): - raise TypeError( - "datum must be a sequence, not %s" % type(datum).__name__) + raise TypeError("datum must be a sequence, not %s" % type(datum).__name__) local_data = None if self.reverse is not None: local_data = {} @@ -946,16 +941,16 @@ def datum_to_value(self, instance, datum): # Get the class from the bound origin. 
bound = getattr(instance._origin, self.cls) return bound.Managed( - instance, self, + instance, + self, ( - bound._object( - self.map_func(instance, item), local_data=local_data) + bound._object(self.map_func(instance, item), local_data=local_data) for item in datum - )) + ), + ) class ObjectMethod: - def __init__(self, _classmethod=None, _instancemethod=None): super(ObjectMethod, self).__init__() self.__classmethod = _classmethod @@ -964,14 +959,14 @@ def __init__(self, _classmethod=None, _instancemethod=None): def __get__(self, instance, owner): if instance is None: if self.__classmethod is None: - raise AttributeError( - "%s has no matching class attribute" % (instance, )) + raise AttributeError("%s has no matching class attribute" % (instance,)) else: return MethodType(self.__classmethod, owner) else: if self.__instancemethod is None: raise AttributeError( - "%s has no matching instance attribute" % (instance, )) + "%s has no matching instance attribute" % (instance,) + ) else: return MethodType(self.__instancemethod, instance) @@ -979,8 +974,7 @@ def __set__(self, instance, value): # Non-data descriptors (those without __set__) can be shadowed by # instance attributes, so prevent that by making this a read-only data # descriptor. - raise AttributeError( - "%s has no matching instance attribute" % (instance, )) + raise AttributeError("%s has no matching instance attribute" % (instance,)) def classmethod(self, func): """Set/modify the class method.""" @@ -1029,7 +1023,7 @@ def __populate(self): handler = handlers.get(name, None) base = self.__objects.get(name, Object) assert issubclass(type(base), ObjectType) - obj = base.bind(self, handler, name=name) + obj = base.bind(self, handler, handlers, name=name) # Those objects without a custom class are "hidden" by prefixing # their name with an underscore. 
objname = "_%s" % name if base is Object else name @@ -1044,6 +1038,7 @@ def __populate(self): def to(cls): def to_cls(value): return cls(value) + return to_cls @@ -1052,8 +1047,8 @@ def checker(value): if issubclass(type(value), expected): return value else: - raise TypeError( - "%r is not of type %s" % (value, expected)) + raise TypeError("%r is not of type %s" % (value, expected)) + return checker @@ -1068,15 +1063,12 @@ def parse_timestamp(created): def mapping_of(cls): """Expects a mapping from some key to data for `cls` instances.""" + def mapper(data): if not isinstance(data, Mapping): - raise TypeError( - "data must be a mapping, not %s" - % type(data).__name__) - return { - key: cls(value) - for key, value in data.items() - } + raise TypeError("data must be a mapping, not %s" % type(data).__name__) + return {key: cls(value) for key, value in data.items()} + return mapper @@ -1094,10 +1086,7 @@ def find_objects(modules): """ return { subclass.__name__: subclass - for subclass in chain( - get_all_subclasses(Object), - get_all_subclasses(ObjectSet), - ) + for subclass in chain(get_all_subclasses(Object), get_all_subclasses(ObjectSet)) if subclass.__module__ in modules } @@ -1108,7 +1097,8 @@ class OriginType(Asynchronous): async def fromURL(cls, url, *, credentials=None, insecure=False): """Return an `Origin` for a given MAAS instance.""" session = await bones.SessionAPI.fromURL( - url, credentials=credentials, insecure=insecure) + url, credentials=credentials, insecure=insecure + ) return cls(session) def fromProfile(cls, profile): @@ -1127,8 +1117,7 @@ def fromProfileName(cls, name): session = bones.SessionAPI.fromProfileName(name) return cls(session) - async def login( - cls, url, *, username=None, password=None, insecure=False): + async def login(cls, url, *, username=None, password=None, insecure=False): """Make an `Origin` by logging-in with a username and password. 
:return: A tuple of ``profile`` and ``origin``, where the former is an @@ -1136,11 +1125,11 @@ async def login( made using the profile. """ profile, session = await bones.SessionAPI.login( - url=url, username=username, password=password, insecure=insecure) + url=url, username=username, password=password, insecure=insecure + ) return profile, cls(session) - async def connect( - cls, url, *, apikey=None, insecure=False): + async def connect(cls, url, *, apikey=None, insecure=False): """Make an `Origin` by connecting with an apikey. :return: A tuple of ``profile`` and ``origin``, where the former is an @@ -1148,7 +1137,8 @@ async def connect( made using the profile. """ profile, session = await bones.SessionAPI.connect( - url=url, apikey=apikey, insecure=insecure) + url=url, apikey=apikey, insecure=insecure + ) return profile, cls(session) def __dir__(cls): @@ -1170,21 +1160,35 @@ def __init__(self, session): modules = { ".", ".account", + ".bcache_cache_sets", + ".bcaches", + ".block_devices", ".boot_resources", - ".boot_sources", ".boot_source_selections", + ".boot_sources", ".controllers", ".devices", + ".dnsresources", + ".dnsresourcerecords", + ".domains", ".events", ".subnets", ".fabrics", ".spaces", ".files", + ".filesystem_groups", + ".filesystems", ".interfaces", ".ipranges", + ".ip_addresses", + ".logical_volumes", ".maas", ".machines", ".nodes", + ".partitions", + ".pods", + ".raids", + ".resource_pools", ".spaces", ".sshkeys", ".static_routes", @@ -1193,11 +1197,12 @@ def __init__(self, session): ".users", ".version", ".vlans", + ".volume_groups", ".zones", } super(Origin, self).__init__( - session, objects=find_objects({ - import_module(name, __name__).__name__ - for name in modules - }), + session, + objects=find_objects( + {import_module(name, __name__).__name__ for name in modules} + ), ) diff --git a/maas/client/viscera/account.py b/maas/client/viscera/account.py index 32410dc9..13ffbe83 100644 --- a/maas/client/viscera/account.py +++ 
b/maas/client/viscera/account.py @@ -1,13 +1,8 @@ """Objects for accounts.""" -__all__ = [ - "Account", -] - -from . import ( - Object, - ObjectType, -) +__all__ = ["Account"] + +from . import Object, ObjectType from ..utils.creds import Credentials @@ -17,12 +12,13 @@ class AccountType(ObjectType): async def create_credentials(cls) -> Credentials: data = await cls._handler.create_authorisation_token() return Credentials( - consumer_key=data["consumer_key"], token_key=data["token_key"], - token_secret=data["token_secret"]) + consumer_key=data["consumer_key"], + token_key=data["token_key"], + token_secret=data["token_secret"], + ) async def delete_credentials(cls, credentials: Credentials) -> None: - await cls._handler.delete_authorisation_token( - token_key=credentials.token_key) + await cls._handler.delete_authorisation_token(token_key=credentials.token_key) class Account(Object, metaclass=AccountType): diff --git a/maas/client/viscera/bcache_cache_sets.py b/maas/client/viscera/bcache_cache_sets.py new file mode 100644 index 00000000..b505bd1b --- /dev/null +++ b/maas/client/viscera/bcache_cache_sets.py @@ -0,0 +1,92 @@ +"""Objects for cache sets.""" + +__all__ = ["BcacheCacheSet", "BcacheCacheSets"] + +from typing import Union + +from . 
import ObjectSet, ObjectType +from .nodes import Node +from .block_devices import BlockDevice +from .partitions import Partition +from .filesystem_groups import DeviceField, FilesystemGroup + + +class BcacheCacheSetType(ObjectType): + """Metaclass for `BcacheCacheSet`.""" + + async def read(cls, node, id): + """Get `BcacheCacheSet` by `id`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + return cls(await cls._handler.read(system_id=system_id, id=id)) + + +class BcacheCacheSet(FilesystemGroup, metaclass=BcacheCacheSetType): + """A cache set on a machine.""" + + cache_device = DeviceField("cache_device") + + def __repr__(self): + return super(BcacheCacheSet, self).__repr__(fields={"name", "cache_device"}) + + async def delete(self): + """Delete this cache set.""" + await self._handler.delete(system_id=self.node.system_id, id=self.id) + + +class BcacheCacheSetsType(ObjectType): + """Metaclass for `BcacheCacheSets`.""" + + async def read(cls, node): + """Get list of `BcacheCacheSet`'s for `node`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + data = await cls._handler.read(system_id=system_id) + return cls( + cls._object(item, local_data={"node_system_id": system_id}) for item in data + ) + + async def create( + cls, node: Union[Node, str], cache_device: Union[BlockDevice, Partition] + ): + """ + Create a BcacheCacheSet on a Node. + + :param node: Node to create the interface on. + :type node: `Node` or `str` + :param cache_device: Block device or partition to create + the cache set on. 
+ :type cache_device: `BlockDevice` or `Partition` + """ + params = {} + if isinstance(node, str): + params["system_id"] = node + elif isinstance(node, Node): + params["system_id"] = node.system_id + else: + raise TypeError( + "node must be a Node or str, not %s" % (type(node).__name__) + ) + if isinstance(cache_device, BlockDevice): + params["cache_device"] = cache_device.id + elif isinstance(cache_device, Partition): + params["cache_partition"] = cache_device.id + else: + raise TypeError( + "cache_device must be a BlockDevice or Partition, not %s" + % (type(cache_device).__name__) + ) + + return cls._object(await cls._handler.create(**params)) + + +class BcacheCacheSets(ObjectSet, metaclass=BcacheCacheSetsType): + """The set of cache sets on a machine.""" diff --git a/maas/client/viscera/bcaches.py b/maas/client/viscera/bcaches.py new file mode 100644 index 00000000..c554852e --- /dev/null +++ b/maas/client/viscera/bcaches.py @@ -0,0 +1,148 @@ +"""Objects for Bcaches.""" + +__all__ = ["Bcache", "Bcaches"] + +from typing import Union + +from . 
import ObjectField, ObjectFieldRelated, ObjectSet, ObjectType, to, check +from .nodes import Node +from .bcache_cache_sets import BcacheCacheSet +from .block_devices import BlockDevice +from .partitions import Partition +from .filesystem_groups import DeviceField, FilesystemGroup +from ..enum import CacheMode + + +class BcacheType(ObjectType): + """Metaclass for `Bcache`.""" + + async def read(cls, node, id): + """Get `Bcache` by `id`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + return cls(await cls._handler.read(system_id=system_id, id=id)) + + +class Bcache(FilesystemGroup, metaclass=BcacheType): + """A Bcache on a machine.""" + + cache_mode = ObjectField.Checked("cache_mode", to(CacheMode), check(CacheMode)) + uuid = ObjectField.Checked("uuid", check(str), check(str)) + + backing_device = DeviceField("backing_device") + cache_set = ObjectFieldRelated("cache_set", "BcacheCacheSet", reverse=None) + virtual_device = ObjectFieldRelated( + "virtual_device", "BlockDevice", reverse=None, readonly=True + ) + + def __repr__(self): + return super(Bcache, self).__repr__( + fields={"name", "cache_mode", "size", "backing_device"} + ) + + async def delete(self): + """Delete this Bcache.""" + await self._handler.delete(system_id=self.node.system_id, id=self.id) + + +class BcachesType(ObjectType): + """Metaclass for `Bcaches`.""" + + async def read(cls, node): + """Get list of `Bcache`'s for `node`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + data = await cls._handler.read(system_id=system_id) + return cls( + cls._object(item, local_data={"node_system_id": system_id}) for item in data + ) + + async def create( + cls, + node: Union[Node, str], + name: str, + backing_device: 
Union[BlockDevice, Partition], + cache_set: Union[BcacheCacheSet, int], + cache_mode: CacheMode, + *, + uuid: str = None + ): + """ + Create a Bcache on a Node. + + :param node: Node to create the interface on. + :type node: `Node` or `str` + :param name: Name of the Bcache. + :type name: `str` + :param backing_device: Either a block device or partition to create + the Bcache from. + :type backing_device: `BlockDevice` or `Partition` + :param cache_set: Bcache cache set to use in front of backing device. + :type cache_set: `BcacheCacheSet` or `int` + :param cache_mode: Caching mode to use for this device. + :type cache_mode: `CacheMode` + :type backing_device: `BlockDevice` or `Partition` + :param uuid: The UUID for the Bcache (optional). + :type uuid: `str` + """ + params = {"name": name} + if isinstance(node, str): + params["system_id"] = node + elif isinstance(node, Node): + params["system_id"] = node.system_id + else: + raise TypeError( + "node must be a Node or str, not %s" % (type(node).__name__) + ) + + if isinstance(backing_device, BlockDevice): + params["backing_device"] = backing_device.id + elif isinstance(backing_device, Partition): + params["backing_partition"] = backing_device.id + else: + raise TypeError( + "backing_device must be a BlockDevice or Partition, " + "not %s" % type(backing_device).__name__ + ) + + if isinstance(cache_set, BcacheCacheSet): + params["cache_set"] = cache_set.id + elif isinstance(cache_set, int): + params["cache_set"] = cache_set + else: + raise TypeError( + "cache_set must be a BcacheCacheSet or int, " + "not %s" % type(cache_set).__name__ + ) + + if isinstance(cache_mode, CacheMode): + params["cache_mode"] = cache_mode.value + else: + raise TypeError( + "cache_mode must be a CacheMode, " "not %s" % type(cache_mode).__name__ + ) + + if uuid is not None: + params["uuid"] = uuid + return cls._object(await cls._handler.create(**params)) + + +class Bcaches(ObjectSet, metaclass=BcachesType): + """The set of Bcaches on a 
machine.""" + + @property + def by_name(self): + """Return mapping of name to `Bcache`.""" + return {bcache.name: bcache for bcache in self} + + def get_by_name(self, name): + """Return a `Bcache` by its name.""" + return self.by_name[name] diff --git a/maas/client/viscera/block_devices.py b/maas/client/viscera/block_devices.py new file mode 100644 index 00000000..c23df3ee --- /dev/null +++ b/maas/client/viscera/block_devices.py @@ -0,0 +1,244 @@ +"""Objects for block devices.""" + +__all__ = ["BlockDevice", "BlockDevices"] + +from typing import Iterable, Union + +from . import ( + check, + check_optional, + Object, + ObjectField, + ObjectFieldRelated, + ObjectFieldRelatedSet, + ObjectSet, + ObjectType, + to, +) +from .nodes import Node +from ..enum import BlockDeviceType, PartitionTableType +from ..utils import remove_None + + +class BlockDeviceTypeMeta(ObjectType): + """Metaclass for `BlockDevice`.""" + + async def read(cls, node, id): + """Get `BlockDevice` by `id`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + return cls(await cls._handler.read(system_id=system_id, id=id)) + + +class BlockDevice(Object, metaclass=BlockDeviceTypeMeta): + """A block device on a machine.""" + + node = ObjectFieldRelated("system_id", "Node", readonly=True, pk=0) + id = ObjectField.Checked("id", check(int), readonly=True, pk=1) + type = ObjectField.Checked("type", to(BlockDeviceType), readonly=True) + name = ObjectField.Checked("name", check(str), check(str), alt_pk=1) + model = ObjectField.Checked("model", check_optional(str), check_optional(str)) + serial = ObjectField.Checked("serial", check_optional(str), check_optional(str)) + id_path = ObjectField.Checked("id_path", check_optional(str), check_optional(str)) + size = ObjectField.Checked("size", check(int), check(int)) + block_size = ObjectField.Checked("block_size", check(int), 
check(int)) + uuid = ObjectField.Checked("uuid", check(str), check(str)) + tags = ObjectField.Checked("tags", check(list), check(list)) + + available_size = ObjectField.Checked("available_size", check(int), readonly=True) + used_size = ObjectField.Checked("used_size", check(int), readonly=True) + used_for = ObjectField.Checked("used_for", check(str), readonly=True) + partition_table_type = ObjectField.Checked( + "partition_table_type", to(PartitionTableType), readonly=True + ) + + partitions = ObjectFieldRelatedSet("partitions", "Partitions") + filesystem = ObjectFieldRelated("filesystem", "Filesystem", readonly=True) + + def __repr__(self): + if self.type == BlockDeviceType.PHYSICAL: + return super(BlockDevice, self).__repr__( + name="PhysicalBlockDevice", + fields={"name", "model", "serial", "id_path"}, + ) + elif self.type == BlockDeviceType.VIRTUAL: + return super(BlockDevice, self).__repr__( + name="VirtualBlockDevice", fields={"name"} + ) + else: + raise ValueError("Unknown type: %s" % self.type) + + async def save(self): + """Save this block device.""" + old_tags = list(self._orig_data["tags"]) + new_tags = list(self.tags) + self._changed_data.pop("tags", None) + await super(BlockDevice, self).save() + for tag_name in new_tags: + if tag_name not in old_tags: + await self._handler.add_tag( + system_id=self.node.system_id, id=self.id, tag=tag_name + ) + else: + old_tags.remove(tag_name) + for tag_name in old_tags: + await self._handler.remove_tag( + system_id=self.node.system_id, id=self.id, tag=tag_name + ) + self._orig_data["tags"] = new_tags + self._data["tags"] = list(new_tags) + + async def delete(self): + """Delete this block device.""" + await self._handler.delete(system_id=self.node.system_id, id=self.id) + + async def set_as_boot_disk(self): + """Set as boot disk for this node.""" + await self._handler.set_boot_disk(system_id=self.node.system_id, id=self.id) + + async def format(self, fstype, *, uuid=None): + """Format this block device.""" + 
self._reset( + await self._handler.format( + system_id=self.node.system_id, id=self.id, fstype=fstype, uuid=uuid + ) + ) + + async def unformat(self): + """Unformat this block device.""" + self._reset( + await self._handler.unformat(system_id=self.node.system_id, id=self.id) + ) + + async def mount(self, mount_point, *, mount_options=None): + """Mount this block device.""" + self._reset( + await self._handler.mount( + system_id=self.node.system_id, + id=self.id, + mount_point=mount_point, + mount_options=mount_options, + ) + ) + + async def unmount(self): + """Unmount this block device.""" + self._reset( + await self._handler.unmount(system_id=self.node.system_id, id=self.id) + ) + + +class BlockDevicesType(ObjectType): + """Metaclass for `BlockDevices`.""" + + async def read(cls, node): + """Get list of `BlockDevice`'s for `node`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + data = await cls._handler.read(system_id=system_id) + return cls( + cls._object(item, local_data={"node_system_id": system_id}) for item in data + ) + + async def create( + cls, + node: Union[Node, str], + name: str, + *, + model: str = None, + serial: str = None, + id_path: str = None, + size: int = None, + block_size: int = 512, + tags: Iterable[str] = None + ): + """ + Create a physical block device on a Node. + + Either model and serial or id_path must be provided when creating a + `BlockDevice`. Size (bytes) is always required. + + NOTE: It is recommended to use the MAAS commissioning process to + discover `BlockDevice`'s on a machine. Getting any of this information + incorrect can result on the machine failing to deploy. + + :param node: Node to create the block device on. + :type node: `Node` or `str` + :param name: The name for the block device. + :type name: `str` + :param model: The model number for the block device. 
+ :type model: `str` + :param serial: The serial number for the block device. + :type serial: `str` + :param id_path: Unique path that identifies the device no matter + the kernel the machine boots. Use only when the block device + does not have a model and serial number. + :type id_path: `str` + :param size: The size of the block device in bytes. + :type size: `int` + :param block_size: The block size of the block device in bytes. + :type block_size: `int` + :param tags: List of tags to add to the block device. + :type tags: sequence of `str` + """ + params = {} + if isinstance(node, str): + params["system_id"] = node + elif isinstance(node, Node): + params["system_id"] = node.system_id + else: + raise TypeError( + "node must be a Node or str, not %s" % (type(node).__name__) + ) + + if not size or size < 0: + raise ValueError("size must be provided and greater than zero.") + if not block_size or block_size < 0: + raise ValueError("block_size must be provided and greater than zero.") + if model and not serial: + raise ValueError("serial must be provided when model is provided.") + if not model and serial: + raise ValueError("model must be provided when serial is provided.") + if not model and not serial and not id_path: + raise ValueError( + "Either model/serial is provided or id_path must be provided." 
+ ) + + params.update( + remove_None( + { + "name": name, + "model": model, + "serial": serial, + "id_path": id_path, + "size": size, + "block_size": block_size, + } + ) + ) + device = cls._object(await cls._handler.create(**params)) + if tags: + device.tags = tags + await device.save() + return device + + +class BlockDevices(ObjectSet, metaclass=BlockDevicesType): + """The set of block devices on a machine.""" + + @property + def by_name(self): + """Return mapping of name of block device to `BlockDevice`.""" + return {bd.name: bd for bd in self} + + def get_by_name(self, name): + """Return a `BlockDevice` by its name.""" + return self.by_name[name] diff --git a/maas/client/viscera/boot_resources.py b/maas/client/viscera/boot_resources.py index 4a8f948b..2ab51c40 100644 --- a/maas/client/viscera/boot_resources.py +++ b/maas/client/viscera/boot_resources.py @@ -1,9 +1,6 @@ """Objects for boot resources.""" -__all__ = [ - "BootResource", - "BootResources", -] +__all__ = ["BootResource", "BootResources"] import enum import hashlib @@ -49,31 +46,23 @@ class BootResourceFileType(enum.Enum): class BootResourceFile(Object): """A boot resource file.""" - filename = ObjectField.Checked( - "filename", check(str), readonly=True) - filetype = ObjectField.Checked( - "filetype", check(str), readonly=True) - size = ObjectField.Checked( - "size", check(int), readonly=True) - sha256 = ObjectField.Checked( - "sha256", check(str), readonly=True) - complete = ObjectField.Checked( - "complete", check(bool), readonly=True) + filename = ObjectField.Checked("filename", check(str), readonly=True) + filetype = ObjectField.Checked("filetype", check(str), readonly=True) + size = ObjectField.Checked("size", check(int), readonly=True) + sha256 = ObjectField.Checked("sha256", check(str), readonly=True) + complete = ObjectField.Checked("complete", check(bool), readonly=True) class BootResourceSet(Object): """A boot resource set.""" - version = ObjectField.Checked( - "version", check(str), 
readonly=True) - size = ObjectField.Checked( - "size", check(int), readonly=True) - label = ObjectField.Checked( - "label", check(str), readonly=True) - complete = ObjectField.Checked( - "complete", check(bool), readonly=True) + version = ObjectField.Checked("version", check(str), readonly=True) + size = ObjectField.Checked("size", check(int), readonly=True) + label = ObjectField.Checked("label", check(str), readonly=True) + complete = ObjectField.Checked("complete", check(bool), readonly=True) files = ObjectField.Checked( - "files", mapping_of(BootResourceFile), default=None, readonly=True) + "files", mapping_of(BootResourceFile), default=None, readonly=True + ) class BootResourcesType(ObjectType): @@ -106,10 +95,16 @@ async def stop_import(cls): return cls._handler.stop_import() async def create( - cls, name: str, architecture: str, content: io.IOBase, *, - title: str="", - filetype: BootResourceFileType=BootResourceFileType.TGZ, - chunk_size=(1 << 22), progress_callback=None): + cls, + name: str, + architecture: str, + content: io.IOBase, + *, + title: str = "", + filetype: BootResourceFileType = BootResourceFileType.TGZ, + chunk_size=(1 << 22), + progress_callback=None + ): """Create a `BootResource`. Creates an uploaded boot resource with `content`. The `content` is @@ -139,24 +134,28 @@ async def create( :returns: Create boot resource. :rtype: `BootResource`. 
""" - if '/' not in name: - raise ValueError( - "name must be in format os/release; missing '/'") - if '/' not in architecture: - raise ValueError( - "architecture must be in format arch/subarch; missing '/'") + if "/" not in name: + raise ValueError("name must be in format os/release; missing '/'") + if "/" not in architecture: + raise ValueError("architecture must be in format arch/subarch; missing '/'") if not content.readable(): raise ValueError("content must be readable") elif not content.seekable(): raise ValueError("content must be seekable") if chunk_size <= 0: - raise ValueError( - "chunk_size must be greater than 0, not %d" % chunk_size) + raise ValueError("chunk_size must be greater than 0, not %d" % chunk_size) size, sha256 = calc_size_and_sha265(content, chunk_size) - resource = cls._object(await cls._handler.create( - name=name, architecture=architecture, title=title, - filetype=filetype.value, size=str(size), sha256=sha256)) + resource = cls._object( + await cls._handler.create( + name=name, + architecture=architecture, + title=title, + filetype=filetype.value, + size=str(size), + sha256=sha256, + ) + ) newest_set = max(resource.sets, default=None) assert newest_set is not None resource_set = resource.sets[newest_set] @@ -167,17 +166,21 @@ async def create( return resource else: # Upload in chunks and reload boot resource. 
- await cls._upload_chunks( - rfile, content, chunk_size, progress_callback) + await cls._upload_chunks(rfile, content, chunk_size, progress_callback) return cls._object.read(resource.id) async def _upload_chunks( - cls, rfile: BootResourceFile, content: io.IOBase, chunk_size: int, - progress_callback=None): + cls, + rfile: BootResourceFile, + content: io.IOBase, + chunk_size: int, + progress_callback=None, + ): """Upload the `content` to `rfile` in chunks using `chunk_size`.""" content.seek(0, io.SEEK_SET) - upload_uri = urlparse( - cls._handler.uri)._replace(path=rfile._data['upload_uri']).geturl() + upload_uri = ( + urlparse(cls._handler.uri)._replace(path=rfile._data["upload_uri"]).geturl() + ) uploaded_size = 0 insecure = cls._handler.session.insecure @@ -197,21 +200,20 @@ async def _upload_chunks( break async def _put_chunk( - cls, session: aiohttp.ClientSession, - upload_uri: str, buf: bytes): + cls, session: aiohttp.ClientSession, upload_uri: str, buf: bytes + ): """Upload one chunk to `upload_uri`.""" # Build the correct headers. headers = { - 'Content-Type': 'application/octet-stream', - 'Content-Length': '%s' % len(buf), + "Content-Type": "application/octet-stream", + "Content-Length": "%s" % len(buf), } credentials = cls._handler.session.credentials if credentials is not None: utils.sign(upload_uri, headers, credentials) # Perform upload of chunk. 
- async with await session.put( - upload_uri, data=buf, headers=headers) as response: + async with await session.put(upload_uri, data=buf, headers=headers) as response: if response.status != 200: content = await response.read() request = { @@ -228,7 +230,6 @@ class BootResources(ObjectSet, metaclass=BootResourcesType): class BootResourceType(ObjectType): - async def read(cls, id: int): """Get `BootResource` by `id`.""" data = await cls._handler.read(id=id) @@ -238,23 +239,27 @@ async def read(cls, id: int): class BootResource(Object, metaclass=BootResourceType): """A boot resource.""" - id = ObjectField.Checked( - "id", check(int), readonly=True, pk=True) - type = ObjectField.Checked( - "type", check(str), check(str), readonly=True) - name = ObjectField.Checked( - "name", check(str), check(str), readonly=True) + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + type = ObjectField.Checked("type", check(str), check(str), readonly=True) + name = ObjectField.Checked("name", check(str), check(str), readonly=True) architecture = ObjectField.Checked( - "architecture", check(str), check(str), readonly=True) + "architecture", check(str), check(str), readonly=True + ) subarches = ObjectField.Checked( - "subarches", check_optional(str), check_optional(str), - default=None, readonly=True) + "subarches", + check_optional(str), + check_optional(str), + default=None, + readonly=True, + ) sets = ObjectField.Checked( - "sets", mapping_of(BootResourceSet), default=None, readonly=True) + "sets", mapping_of(BootResourceSet), default=None, readonly=True + ) def __repr__(self): return super(BootResource, self).__repr__( - fields={"type", "name", "architecture"}) + fields={"type", "name", "architecture"} + ) async def delete(self): """Delete boot resource.""" diff --git a/maas/client/viscera/boot_source_selections.py b/maas/client/viscera/boot_source_selections.py index a2735e65..818e674f 100644 --- a/maas/client/viscera/boot_source_selections.py +++ 
b/maas/client/viscera/boot_source_selections.py @@ -1,20 +1,10 @@ """Objects for boot source selections.""" -__all__ = [ - "BootSourceSelection", - "BootSourceSelections", -] - -from collections import Sequence - -from . import ( - check, - Object, - ObjectField, - ObjectFieldRelated, - ObjectSet, - ObjectType, -) +__all__ = ["BootSourceSelection", "BootSourceSelections"] + + +from collections.abc import Sequence +from . import check, Object, ObjectField, ObjectFieldRelated, ObjectSet, ObjectType from .boot_sources import BootSource @@ -22,23 +12,27 @@ class BootSourceSelectionsType(ObjectType): """Metaclass for `BootSourceSelections`.""" async def create( - cls, boot_source, os, release, *, - arches=None, subarches=None, labels=None): + cls, boot_source, os, release, *, arches=None, subarches=None, labels=None + ): """Create a new `BootSourceSelection`.""" if not isinstance(boot_source, BootSource): raise TypeError( - "boot_source must be a BootSource, not %s" - % type(boot_source).__name__) + "boot_source must be a BootSource, not %s" % type(boot_source).__name__ + ) if arches is None: - arches = ['*'] + arches = ["*"] if subarches is None: - subarches = ['*'] + subarches = ["*"] if labels is None: - labels = ['*'] + labels = ["*"] data = await cls._handler.create( boot_source_id=boot_source.id, - os=os, release=release, arches=arches, subarches=subarches, - labels=labels) + os=os, + release=release, + arches=arches, + subarches=subarches, + labels=labels, + ) return cls._object(data, {"boot_source_id": boot_source.id}) async def read(cls, boot_source): @@ -50,11 +44,13 @@ async def read(cls, boot_source): else: raise TypeError( "boot_source must be a BootSource or int, not %s" - % type(boot_source).__name__) + % type(boot_source).__name__ + ) data = await cls._handler.read(boot_source_id=boot_source_id) return cls( - cls._object(item, local_data={'boot_source_id': boot_source_id}) - for item in data) + cls._object(item, local_data={"boot_source_id": 
boot_source_id}) + for item in data + ) class BootSourceSelections(ObjectSet, metaclass=BootSourceSelectionsType): @@ -62,7 +58,6 @@ class BootSourceSelections(ObjectSet, metaclass=BootSourceSelectionsType): class BootSourceSelectionType(ObjectType): - async def read(cls, boot_source, id): """Get `BootSourceSelection` by `id`.""" if isinstance(boot_source, int): @@ -72,7 +67,8 @@ async def read(cls, boot_source, id): else: raise TypeError( "boot_source must be a BootSource or int, not %s" - % type(boot_source).__name__) + % type(boot_source).__name__ + ) data = await cls._handler.read(boot_source_id=boot_source_id, id=id) return cls(data, {"boot_source_id": boot_source_id}) @@ -83,26 +79,27 @@ class BootSourceSelection(Object, metaclass=BootSourceSelectionType): # Only client-side. Classes in this file place `boot_source_id` on # the object using `local_data`. boot_source = ObjectFieldRelated( - "boot_source_id", "BootSource", readonly=True, pk=0) - - id = ObjectField.Checked( - "id", check(int), readonly=True, pk=1) - os = ObjectField.Checked( - "os", check(str), check(str)) - release = ObjectField.Checked( - "release", check(str), check(str)) + "boot_source_id", "BootSource", readonly=True, pk=0 + ) + + id = ObjectField.Checked("id", check(int), readonly=True, pk=1) + os = ObjectField.Checked("os", check(str), check(str)) + release = ObjectField.Checked("release", check(str), check(str)) arches = ObjectField.Checked( # List[str] - "arches", check(Sequence), check(Sequence)) + "arches", check(Sequence), check(Sequence) + ) subarches = ObjectField.Checked( # List[str] - "subarches", check(Sequence), check(Sequence)) + "subarches", check(Sequence), check(Sequence) + ) labels = ObjectField.Checked( # List[str] - "labels", check(Sequence), check(Sequence)) + "labels", check(Sequence), check(Sequence) + ) def __repr__(self): return super(BootSourceSelection, self).__repr__( - fields={"os", "release", "arches", "subarches", "labels"}) + fields={"os", "release", 
"arches", "subarches", "labels"} + ) async def delete(self): """Delete boot source selection.""" - await self._handler.delete( - boot_source_id=self.boot_source.id, id=self.id) + await self._handler.delete(boot_source_id=self.boot_source.id, id=self.id) diff --git a/maas/client/viscera/boot_sources.py b/maas/client/viscera/boot_sources.py index eb2111f0..d5daaf3f 100644 --- a/maas/client/viscera/boot_sources.py +++ b/maas/client/viscera/boot_sources.py @@ -1,18 +1,8 @@ """Objects for boot sources.""" -__all__ = [ - "BootSource", - "BootSources", -] - -from . import ( - check, - Object, - ObjectField, - ObjectSet, - ObjectType, - parse_timestamp, -) +__all__ = ["BootSource", "BootSources"] + +from . import check, Object, ObjectField, ObjectSet, ObjectType, parse_timestamp from ..utils import coalesce @@ -29,8 +19,10 @@ async def create(cls, url, *, keyring_filename=None, keyring_data=None): instance of `io.BytesIO`. """ data = await cls._handler.create( - url=url, keyring_filename=coalesce(keyring_filename, ""), - keyring_data=coalesce(keyring_data, "")) + url=url, + keyring_filename=coalesce(keyring_filename, ""), + keyring_data=coalesce(keyring_data, ""), + ) return cls._object(data) async def read(cls): @@ -44,7 +36,6 @@ class BootSources(ObjectSet, metaclass=BootSourcesType): class BootSourceType(ObjectType): - async def read(cls, id): """Get `BootSource` by `id`.""" data = await cls._handler.read(id=id) @@ -54,22 +45,21 @@ async def read(cls, id): class BootSource(Object, metaclass=BootSourceType): """A boot source.""" - id = ObjectField.Checked( - "id", check(int), readonly=True, pk=True) - url = ObjectField.Checked( - "url", check(str), check(str)) + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + url = ObjectField.Checked("url", check(str), check(str)) keyring_filename = ObjectField.Checked( - "keyring_filename", check(str), check(str), default="") + "keyring_filename", check(str), check(str), default="" + ) keyring_data = 
ObjectField.Checked( - "keyring_data", check(str), check(str), default="") - created = ObjectField.Checked( - "created", parse_timestamp, readonly=True) - updated = ObjectField.Checked( - "updated", parse_timestamp, readonly=True) + "keyring_data", check(str), check(str), default="" + ) + created = ObjectField.Checked("created", parse_timestamp, readonly=True) + updated = ObjectField.Checked("updated", parse_timestamp, readonly=True) def __repr__(self): return super(BootSource, self).__repr__( - fields={"url", "keyring_filename", "keyring_data"}) + fields={"url", "keyring_filename", "keyring_data"} + ) async def delete(self): """Delete boot source.""" diff --git a/maas/client/viscera/controllers.py b/maas/client/viscera/controllers.py index ae570b9d..83fc66be 100644 --- a/maas/client/viscera/controllers.py +++ b/maas/client/viscera/controllers.py @@ -1,24 +1,9 @@ """Objects for region and rack controllers.""" -__all__ = [ - "RackController", - "RackControllers", - "RegionController", - "RegionControllers", -] - -from . import ( - check, - check_optional, - ObjectField, - to, -) -from .nodes import ( - Node, - Nodes, - NodesType, - NodeTypeMeta, -) +__all__ = ["RackController", "RackControllers", "RegionController", "RegionControllers"] + +from . 
import check, check_optional, ObjectField, to +from .nodes import Node, Nodes, NodesType, NodeTypeMeta from ..enum import PowerState @@ -38,17 +23,13 @@ class RackController(Node, metaclass=RackControllerType): """A rack-controller stored in MAAS.""" architecture = ObjectField.Checked( - "architecture", check_optional(str), check_optional(str)) - cpus = ObjectField.Checked( - "cpu_count", check(int), check(int)) - distro_series = ObjectField.Checked( - "distro_series", check(str), check(str)) - memory = ObjectField.Checked( - "memory", check(int), check(int)) - osystem = ObjectField.Checked( - "osystem", check(str), readonly=True) - power_state = ObjectField.Checked( - "power_state", to(PowerState), readonly=True) + "architecture", check_optional(str), check_optional(str) + ) + cpus = ObjectField.Checked("cpu_count", check(int), check(int)) + distro_series = ObjectField.Checked("distro_series", check(str), check(str)) + memory = ObjectField.Checked("memory", check(int), check(int)) + osystem = ObjectField.Checked("osystem", check(str), readonly=True) + power_state = ObjectField.Checked("power_state", to(PowerState), readonly=True) # power_type # service_set @@ -71,17 +52,13 @@ class RegionController(Node, metaclass=RegionControllerType): """A region-controller stored in MAAS.""" architecture = ObjectField.Checked( - "architecture", check_optional(str), check_optional(str)) - cpus = ObjectField.Checked( - "cpu_count", check(int), check(int)) - distro_series = ObjectField.Checked( - "distro_series", check(str), check(str)) - memory = ObjectField.Checked( - "memory", check(int), check(int)) - osystem = ObjectField.Checked( - "osystem", check(str), readonly=True) - power_state = ObjectField.Checked( - "power_state", to(PowerState), readonly=True) + "architecture", check_optional(str), check_optional(str) + ) + cpus = ObjectField.Checked("cpu_count", check(int), check(int)) + distro_series = ObjectField.Checked("distro_series", check(str), check(str)) + memory = 
ObjectField.Checked("memory", check(int), check(int)) + osystem = ObjectField.Checked("osystem", check(str), readonly=True) + power_state = ObjectField.Checked("power_state", to(PowerState), readonly=True) # power_type # service_set diff --git a/maas/client/viscera/devices.py b/maas/client/viscera/devices.py index 1b4ecc45..ee74f75f 100644 --- a/maas/client/viscera/devices.py +++ b/maas/client/viscera/devices.py @@ -1,21 +1,51 @@ """Objects for devices.""" -__all__ = [ - "Device", - "Devices", -] +__all__ = ["Device", "Devices"] -from .nodes import ( - Node, - Nodes, - NodesType, - NodeTypeMeta, -) +import typing + +from .nodes import Node, Nodes, NodesType, NodeTypeMeta +from .zones import Zone class DevicesType(NodesType): """Metaclass for `Devices`.""" + async def create( + cls, + mac_addresses: typing.Sequence[str], + hostname: str = None, + domain: typing.Union[int, str] = None, + zone: typing.Union[str, Zone] = None, + ): + """Create a new device. + + :param mac_addresses: The MAC address(es) of the device (required). + :type mac_addresses: sequence of `str` + :param hostname: The hostname for the device (optional). + :type hostname: `str` + :param domain: The domain for the device (optional). + :type domain: `int` or `str` + :param zone: The zone for the device (optional). 
+ :type zone: `Zone` or `str` + + """ + params = {"mac_addresses": mac_addresses} + if hostname is not None: + params["hostname"] = hostname + if domain is not None: + params["domain"] = domain + if zone is not None: + if isinstance(zone, Zone): + params["zone"] = zone.name + elif isinstance(zone, str): + params["zone"] = zone + else: + raise TypeError( + "zone must be a str or Zone, not %s" % type(zone).__name__ + ) + return cls._object(await cls._handler.create(**params)) + class Devices(Nodes, metaclass=DevicesType): """The set of devices stored in MAAS.""" diff --git a/maas/client/viscera/dnsresourcerecords.py b/maas/client/viscera/dnsresourcerecords.py new file mode 100644 index 00000000..4aae103c --- /dev/null +++ b/maas/client/viscera/dnsresourcerecords.py @@ -0,0 +1,38 @@ +"""Objects for dnsresourcerecords.""" + +__all__ = ["DNSResourceRecord", "DNSResourceRecords"] + +from . import check, check_optional, Object, ObjectField, ObjectSet, ObjectType + + +class DNSResourceRecordType(ObjectType): + """Metaclass for `DNSResourceRecords`.""" + + async def read(cls): + data = await cls._handler.read() + return cls(map(cls._object, data)) + + +class DNSResourceRecords(ObjectSet, metaclass=DNSResourceRecordType): + """The set of dnsresourcerecords stored in MAAS.""" + + +class DNSResourceRecordType(ObjectType): + async def read(cls, id): + data = await cls._handler.read(id=id) + return cls(data) + + +class DNSResourceRecord(Object, metaclass=DNSResourceRecordType): + """A dnsresourcerecord stored in MAAS.""" + + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + ttl = ObjectField.Checked("ttl", check_optional(int), check_optional(int)) + rrtype = ObjectField.Checked("rrtype", check(str), check(str)) + rrdata = ObjectField.Checked("rrdata", check(str), check(str)) + fqdn = ObjectField.Checked("fqdn", check(str), check(str)) + + def __repr__(self): + return super(DNSResourceRecord, self).__repr__( + fields={"ttl", "rrtype", "rrdata", "fqdn"} + ) diff 
--git a/maas/client/viscera/dnsresources.py b/maas/client/viscera/dnsresources.py new file mode 100644 index 00000000..97e1e46b --- /dev/null +++ b/maas/client/viscera/dnsresources.py @@ -0,0 +1,48 @@ +"""Objects for dnsresources.""" + +__all__ = ["DNSResource", "DNSResources"] + +from . import ( + check, + check_optional, + Object, + ObjectField, + ObjectSet, + ObjectType, + ObjectFieldRelatedSet, +) + + +class DNSResourceType(ObjectType): + """Metaclass for `DNSResources`.""" + + async def read(cls): + data = await cls._handler.read() + return cls(map(cls._object, data)) + + +class DNSResources(ObjectSet, metaclass=DNSResourceType): + """The set of dnsresources stored in MAAS.""" + + +class DNSResourceType(ObjectType): + async def read(cls, id): + data = await cls._handler.read(id=id) + return cls(data) + + +class DNSResource(Object, metaclass=DNSResourceType): + """A dnsresource stored in MAAS.""" + + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + address_ttl = ObjectField.Checked( + "address_ttl", check_optional(int), check_optional(int) + ) + fqdn = ObjectField.Checked("fqdn", check(str), check(str)) + ip_addresses = ObjectFieldRelatedSet("ip_addresses", "IPAddresses") + resource_records = ObjectFieldRelatedSet("resource_records", "DNSResourceRecords") + + def __repr__(self): + return super(DNSResource, self).__repr__( + fields={"address_ttl", "fqdn", "ip_addresses", "resource_records"} + ) diff --git a/maas/client/viscera/domains.py b/maas/client/viscera/domains.py new file mode 100644 index 00000000..43c96f4d --- /dev/null +++ b/maas/client/viscera/domains.py @@ -0,0 +1,61 @@ +"""Objects for domains.""" + +__all__ = ["Domain", "Domains"] + +from . 
import check, check_optional, Object, ObjectField, ObjectSet, ObjectType + + +class DomainType(ObjectType): + """Metaclass for `Domains`.""" + + async def read(cls): + data = await cls._handler.read() + return cls(map(cls._object, data)) + + async def create(cls, name: str, authoritative: bool = True, ttl: int = None): + """ + Create a `Domain` in MAAS. + + :param name: The name of the `Domain`. + :type name: `str` + :param authoritative: Whether the domain is authoritative. + :type authoritative: `bool` + :param ttl: Optional TTL for the domain. + :type ttl: `int` + :returns: The created `Domain` + :rtype: `Domain` + """ + params = {"name": name, "authoritative": authoritative} + if ttl is not None: + params["ttl"] = ttl + return cls._object(await cls._handler.create(**params)) + + +class Domains(ObjectSet, metaclass=DomainType): + """The set of domains stored in MAAS.""" + + +class DomainType(ObjectType): + async def read(cls, id): + data = await cls._handler.read(id=id) + return cls(data) + + +class Domain(Object, metaclass=DomainType): + """A domain stored in MAAS.""" + + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + name = ObjectField.Checked("name", check(str), check(str)) + authoritative = ObjectField.Checked( + "authoritative", check_optional(bool), check_optional(bool) + ) + ttl = ObjectField.Checked("ttl", check_optional(int), check_optional(int)) + + def __repr__(self): + return super(Domain, self).__repr__(fields={"name", "authoritative", "ttl"}) + + async def delete(self): + """ + Deletes the `domain` from MAAS. 
+ """ + return await self._handler.delete(id=self.id) diff --git a/maas/client/viscera/events.py b/maas/client/viscera/events.py index 2eeb37e0..ac3f31fc 100644 --- a/maas/client/viscera/events.py +++ b/maas/client/viscera/events.py @@ -1,28 +1,19 @@ """Objects for events.""" -__all__ = [ - "Events", -] +__all__ = ["Events"] from datetime import datetime import enum from functools import partial import logging import typing -from urllib.parse import ( - parse_qs, - urlparse, -) +from urllib.parse import parse_qs, urlparse +from .users import User import pytz -from . import ( - Object, - ObjectField, - ObjectSet, - ObjectType, -) -from ..utils.async import is_loop_running +from . import Object, ObjectField, ObjectSet, ObjectType +from ..utils.maas_async import is_loop_running # # The query API call returns: @@ -43,7 +34,8 @@ # level=event.type.level_str, # created=event.created.strftime('%a, %d %b. %Y %H:%M:%S'), # type=event.type.description, -# description=event.description +# description=event.description, +# username=event.user.username # ) # # Notes: @@ -57,6 +49,7 @@ class Level(enum.IntEnum): They happen to correspond to levels in the `logging` module. 
""" + AUDIT = 0 DEBUG = logging.DEBUG INFO = logging.INFO WARNING = logging.WARNING @@ -83,17 +76,20 @@ class EventsType(ObjectType): Level = Level async def query( - cls, *, - hostnames: typing.Iterable[str]=None, - domains: typing.Iterable[str]=None, - zones: typing.Iterable[str]=None, - macs: typing.Iterable[str]=None, - system_ids: typing.Iterable[str]=None, - agent_name: str=None, - level: typing.Union[Level, int, str]=None, - before: int=None, - after: int=None, - limit: int=None): + cls, + *, + hostnames: typing.Iterable[str] = None, + domains: typing.Iterable[str] = None, + zones: typing.Iterable[str] = None, + macs: typing.Iterable[str] = None, + system_ids: typing.Iterable[str] = None, + agent_name: str = None, + level: typing.Union[Level, int, str] = None, + before: int = None, + after: int = None, + limit: int = None, + owner: typing.Union[User, str] = None + ): """Query MAAS for matching events.""" if before is not None and after is not None: @@ -122,6 +118,15 @@ async def query( params["after"] = ["{:d}".format(after)] if limit is not None: params["limit"] = ["{:d}".format(limit)] + if owner is not None: + if isinstance(owner, User): + params["owner"] = [owner.username] + elif isinstance(owner, str): + params["owner"] = [owner] + else: + raise TypeError( + "owner must be either User or str, not %s" % (type(owner).__name__) + ) data = await cls._handler.query(**params) return cls(data) @@ -192,8 +197,8 @@ def _backwards_sync(self): yield from current if is_loop_running(): raise RuntimeError( - "Cannot iterate synchronously while " - "event-loop is running.") + "Cannot iterate synchronously while " "event-loop is running." + ) current = current.prev() def forwards(self): @@ -214,13 +219,12 @@ def _forwards_sync(self): yield from reversed(current) if is_loop_running(): raise RuntimeError( - "Cannot iterate synchronously while " - "event-loop is running.") + "Cannot iterate synchronously while " "event-loop is running." 
+ ) current = current.next() class EventsAsyncIteratorBackwards: - def __init__(self, current): super(EventsAsyncIteratorBackwards, self).__init__() self._current_iter = iter(current) @@ -242,7 +246,6 @@ async def __anext__(self): class EventsAsyncIteratorForwards: - def __init__(self, current): super(EventsAsyncIteratorForwards, self).__init__() self._current_iter = reversed(current) @@ -271,7 +274,7 @@ def truncate(length, text): # TODO: Move into utils. Otherwise return the given text unaltered. """ if len(text) > length: - return text[:length - 1] + "…" + return text[: length - 1] + "…" else: return text @@ -279,25 +282,21 @@ def truncate(length, text): # TODO: Move into utils. class Event(Object): """An event.""" - event_id = ObjectField( - "id", readonly=True) - event_type = ObjectField( - "type", readonly=True) + event_id = ObjectField("id", readonly=True) + event_type = ObjectField("type", readonly=True) - system_id = ObjectField( - "node", readonly=True) - hostname = ObjectField( - "hostname", readonly=True) + system_id = ObjectField("node", readonly=True) + hostname = ObjectField("hostname", readonly=True) - level = ObjectField.Checked( - "level", Level.normalise, readonly=True) - created = ObjectField.Checked( - "created", parse_created_timestamp, readonly=True) + level = ObjectField.Checked("level", Level.normalise, readonly=True) + created = ObjectField.Checked("created", parse_created_timestamp, readonly=True) - description = ObjectField( - "description", readonly=True) + description = ObjectField("description", readonly=True) description_short = ObjectField.Checked( - "description", partial(truncate, 50), readonly=True) + "description", partial(truncate, 50), readonly=True + ) + + username = ObjectField("username", readonly=True) def __repr__(self): return ( diff --git a/maas/client/viscera/fabrics.py b/maas/client/viscera/fabrics.py index 4af7635d..16896a9e 100644 --- a/maas/client/viscera/fabrics.py +++ b/maas/client/viscera/fabrics.py @@ -1,18 
+1,8 @@ """Objects for fabrics.""" -__all__ = [ - "Fabrics", - "Fabric", -] - -from . import ( - check, - Object, - ObjectField, - ObjectFieldRelatedSet, - ObjectSet, - ObjectType, -) +__all__ = ["Fabrics", "Fabric"] + +from . import check, Object, ObjectField, ObjectFieldRelatedSet, ObjectSet, ObjectType from ..errors import CannotDelete @@ -23,8 +13,9 @@ async def read(cls): data = await cls._handler.read() return cls(map(cls._object, data)) - async def create(cls, *, name: str=None, - description: str=None, class_type: str=None): + async def create( + cls, *, name: str = None, description: str = None, class_type: str = None + ): """ Create a `Fabric` in MAAS. @@ -73,10 +64,8 @@ async def read(cls, id: int): class Fabric(Object, metaclass=FabricType): """A Fabric.""" - id = ObjectField.Checked( - "id", check(int), readonly=True, pk=True) - name = ObjectField.Checked( - "name", check(str), check(str)) + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + name = ObjectField.Checked("name", check(str), check(str)) vlans = ObjectFieldRelatedSet("vlans", "Vlans") async def delete(self): diff --git a/maas/client/viscera/files.py b/maas/client/viscera/files.py index 3c7d0652..e11835a0 100644 --- a/maas/client/viscera/files.py +++ b/maas/client/viscera/files.py @@ -1,17 +1,8 @@ """Objects for files.""" -__all__ = [ - "File", - "Files", -] +__all__ = ["File", "Files"] -from . import ( - check, - Object, - ObjectField, - ObjectSet, - ObjectType, -) +from . 
import check, Object, ObjectField, ObjectSet, ObjectType class FilesType(ObjectType): @@ -29,5 +20,4 @@ class Files(ObjectSet, metaclass=FilesType): class File(Object): """A file stored in MAAS.""" - filename = ObjectField.Checked( - "filename", check(str), readonly=True) + filename = ObjectField.Checked("filename", check(str), readonly=True) diff --git a/maas/client/viscera/filesystem_groups.py b/maas/client/viscera/filesystem_groups.py new file mode 100644 index 00000000..41c6738d --- /dev/null +++ b/maas/client/viscera/filesystem_groups.py @@ -0,0 +1,98 @@ +"""Base object for all filesystem group objects.""" + +__all__ = ["FilesystemGroup"] + +from typing import Sequence + +from . import ( + check, + Object, + ObjectField, + ObjectFieldRelated, + ObjectFieldRelatedSet, + ObjectSet, + undefined, +) +from ..enum import BlockDeviceType + + +def get_device_object(origin, datum): + device_type = datum.get("type") + if device_type in [BlockDeviceType.PHYSICAL.value, BlockDeviceType.VIRTUAL.value]: + return origin.BlockDevice(datum) + elif device_type == "partition": + return origin.Partition(datum) + else: + raise ValueError("Unknown devices type: %s" % device_type) + + +class FilesystemGroupDevices(ObjectSet): + """Devices that make up a `FilesystemGroup`.""" + + +class DevicesField(ObjectFieldRelatedSet): + """Field for `FilesystemGroupDevices`.""" + + def __init__(self, name): + """Create a `DevicesField`. + + :param name: The name of the field. This is the name that's used to + store the datum in the MAAS-side data dictionary. + """ + super(ObjectFieldRelatedSet, self).__init__(name, default=[], readonly=True) + + def datum_to_value(self, instance, datum): + """Convert a given MAAS-side datum to a Python-side value. + + :param instance: The `Object` instance on which this field is + currently operating. This method should treat it as read-only, for + example to perform validation with regards to other fields. 
+ :param datum: The MAAS-side datum to validate and convert into a + Python-side value. + :return: A set of `cls` from the given datum. + """ + if datum is None: + return [] + if not isinstance(datum, Sequence): + raise TypeError("datum must be a sequence, not %s" % type(datum).__name__) + # Get the class from the bound origin. + bound = getattr(instance._origin, "FilesystemGroupDevices") + return bound((get_device_object(instance._origin, item) for item in datum)) + + +class DeviceField(ObjectFieldRelated): + """Field that returns either `BlockDevice` or `Partition`.""" + + def __init__(self, name, readonly=False): + """Create a `DevicesField`. + + :param name: The name of the field. This is the name that's used to + store the datum in the MAAS-side data dictionary. + """ + super(ObjectFieldRelated, self).__init__( + name, default=undefined, readonly=readonly + ) + + def datum_to_value(self, instance, datum): + """Convert a given MAAS-side datum to a Python-side value. + + :param instance: The `Object` instance on which this field is + currently operating. This method should treat it as read-only, for + example to perform validation with regards to other fields. + :param datum: The MAAS-side datum to validate and convert into a + Python-side value. + :return: A set of `cls` from the given datum. + """ + return get_device_object(instance._origin, datum) + + +class FilesystemGroup(Object): + """A filesystem group on a machine. + + Used by `CacheSet`, `Bcache`, `Raid`, and `VolumeGroup`. Never use + directly. 
+ """ + + node = ObjectFieldRelated("system_id", "Node", readonly=True, pk=0) + id = ObjectField.Checked("id", check(int), readonly=True, pk=1) + name = ObjectField.Checked("name", check(str), check(str), alt_pk=1) diff --git a/maas/client/viscera/filesystems.py b/maas/client/viscera/filesystems.py new file mode 100644 index 00000000..89df5295 --- /dev/null +++ b/maas/client/viscera/filesystems.py @@ -0,0 +1,18 @@ +"""Objects for filesystems.""" + +__all__ = ["Filesystem"] + +from . import check, Object, ObjectField + + +class Filesystem(Object): + """A filesystem on either a partition or block device.""" + + label = ObjectField.Checked("label", check(str), readonly=True) + fstype = ObjectField.Checked("fstype", check(str), readonly=True) + mount_point = ObjectField.Checked("mount_point", check(str), readonly=True) + mount_options = ObjectField.Checked("mount_options", check(str), readonly=True) + uuid = ObjectField.Checked("uuid", check(str), readonly=True) + + def __repr__(self): + return super(Filesystem, self).__repr__(fields={"fstype", "mount_point"}) diff --git a/maas/client/viscera/interfaces.py b/maas/client/viscera/interfaces.py index 21ab7ba6..c9d2a22e 100644 --- a/maas/client/viscera/interfaces.py +++ b/maas/client/viscera/interfaces.py @@ -1,9 +1,6 @@ """Objects for interfaces.""" -__all__ = [ - "Interface", - "Interfaces", -] +__all__ = ["Interface", "Interfaces"] import copy from typing import Iterable, Union @@ -21,10 +18,7 @@ from .nodes import Node from .subnets import Subnet from .vlans import Vlan -from ..enum import ( - InterfaceType, - LinkMode, -) +from ..enum import InterfaceType, LinkMode from ..utils.diff import calculate_dict_diff @@ -37,8 +31,9 @@ def gen_parents(parents): pass else: raise TypeError( - 'parent[%d] must be an Interface or int, not %s' % ( - idx, type(parent).__name__)) + "parent[%d] must be an Interface or int, not %s" + % (idx, type(parent).__name__) + ) yield parent @@ -50,8 +45,8 @@ def get_parent(parent): return parent 
else: raise TypeError( - "parent must be an Interface or int, not %s" % ( - type(parent).__name__)) + "parent must be an Interface or int, not %s" % (type(parent).__name__) + ) class InterfaceTypeMeta(ObjectType): @@ -64,9 +59,7 @@ async def read(cls, node, id): elif isinstance(node, Node): system_id = node.system_id else: - raise TypeError( - "node must be a Node or str, not %s" - % type(node).__name__) + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) return cls(await cls._handler.read(system_id=system_id, id=id)) @@ -80,88 +73,86 @@ def map_nic_name_to_dict(instance, value): incomplete data. """ return { - 'system_id': instance._data['system_id'], - 'name': value, - '__incomplete__': True + "system_id": instance._data["system_id"], + "name": value, + "__incomplete__": True, } class Interface(Object, metaclass=InterfaceTypeMeta): """A interface on a machine.""" - node = ObjectFieldRelated( - "system_id", "Node", readonly=True, pk=0) - id = ObjectField.Checked( - "id", check(int), readonly=True, pk=1) - type = ObjectField.Checked( - "type", to(InterfaceType), readonly=True) - name = ObjectField.Checked( - "name", check(str), check(str), alt_pk=1) - mac_address = ObjectField.Checked( - "mac_address", check(str), check(str)) - enabled = ObjectField.Checked( - "enabled", check(bool), check(bool)) - effective_mtu = ObjectField.Checked( - "effective_mtu", check(int), readonly=True) - tags = ObjectField.Checked( - "tags", check(list), check(list)) - params = ObjectField.Checked( - "params", check(dict), check(dict)) + node = ObjectFieldRelated("system_id", "Node", readonly=True, pk=0) + id = ObjectField.Checked("id", check(int), readonly=True, pk=1) + type = ObjectField.Checked("type", to(InterfaceType), readonly=True) + name = ObjectField.Checked("name", check(str), check(str), alt_pk=1) + mac_address = ObjectField.Checked("mac_address", check(str), check(str)) + enabled = ObjectField.Checked("enabled", check(bool), check(bool)) + 
effective_mtu = ObjectField.Checked("effective_mtu", check(int), readonly=True) + tags = ObjectField.Checked("tags", check(list), check(list)) + params = ObjectField.Checked("params", check((dict, str)), check((dict, str))) parents = ObjectFieldRelatedSet( - "parents", "Interfaces", reverse=None, - map_func=map_nic_name_to_dict) + "parents", "Interfaces", reverse=None, map_func=map_nic_name_to_dict + ) children = ObjectFieldRelatedSet( - "children", "Interfaces", reverse=None, - map_func=map_nic_name_to_dict) - vlan = ObjectFieldRelated( - "vlan", "Vlan", reverse=None, use_data_setter=True) - links = ObjectFieldRelatedSet( - "links", "InterfaceLinks", reverse="interface") + "children", "Interfaces", reverse=None, map_func=map_nic_name_to_dict + ) + vlan = ObjectFieldRelated("vlan", "Vlan", reverse=None, use_data_setter=True) + links = ObjectFieldRelatedSet("links", "InterfaceLinks", reverse="interface") discovered = ObjectFieldRelatedSet( - "discovered", "InterfaceDiscoveredLinks", reverse=None) + "discovered", "InterfaceDiscoveredLinks", reverse=None + ) + interface_speed = ObjectField.Checked("interface_speed", check(int), readonly=True) + link_speed = ObjectField.Checked("link_speed", check(int), readonly=True) def __repr__(self): - return super(Interface, self).__repr__( - fields={"name", "mac_address", "type"}) + return super(Interface, self).__repr__(fields={"name", "mac_address", "type"}) async def save(self): """Save this interface.""" - if set(self.tags) != set(self._orig_data['tags']): - self._changed_data['tags'] = ','.join(self.tags) - elif 'tags' in self._changed_data: - del self._changed_data['tags'] - self._changed_data.update( - calculate_dict_diff(self._orig_data['params'], self.params)) - if 'vlan' in self._changed_data and self._changed_data['vlan']: + if set(self.tags) != set(self._orig_data["tags"]): + self._changed_data["tags"] = ",".join(self.tags) + elif "tags" in self._changed_data: + del self._changed_data["tags"] + orig_params = 
self._orig_data["params"] + if not isinstance(orig_params, dict): + orig_params = {} + params = self.params + if not isinstance(params, dict): + params = {} + self._changed_data.pop("params", None) + self._changed_data.update(calculate_dict_diff(orig_params, params)) + if "vlan" in self._changed_data and self._changed_data["vlan"]: # Update uses the ID of the VLAN, not the VLAN object. - self._changed_data['vlan'] = self._changed_data['vlan']['id'] - if (self._orig_data['vlan'] and - 'id' in self._orig_data['vlan'] and - self._changed_data['vlan'] == ( - self._orig_data['vlan']['id'])): + self._changed_data["vlan"] = self._changed_data["vlan"]["id"] + if ( + self._orig_data["vlan"] + and "id" in self._orig_data["vlan"] + and self._changed_data["vlan"] == (self._orig_data["vlan"]["id"]) + ): # VLAN didn't really change, the object was just set to the # same VLAN. - del self._changed_data['vlan'] + del self._changed_data["vlan"] await super(Interface, self).save() async def delete(self): """Delete this interface.""" - await self._handler.delete( - system_id=self.node.system_id, id=self.id) + await self._handler.delete(system_id=self.node.system_id, id=self.id) async def disconnect(self): """Disconnect this interface.""" - self._data = await self._handler.disconnect( - system_id=self.node.system_id, id=self.id) + self._reset( + await self._handler.disconnect(system_id=self.node.system_id, id=self.id) + ) class InterfaceDiscoveredLink(Object): """Discovered link information on an `Interface`.""" ip_address = ObjectField.Checked( - "ip_address", check(str), readonly=True, default=None) - subnet = ObjectFieldRelated( - "subnet", "Subnet", readonly=True, default=None) + "ip_address", check(str), readonly=True, default=None + ) + subnet = ObjectFieldRelated("subnet", "Subnet", readonly=True, default=None) class InterfaceDiscoveredLinks(ObjectSet): @@ -173,38 +164,45 @@ class InterfaceLink(Object): id = ObjectField.Checked("id", check(int), readonly=True) mode = 
ObjectField.Checked("mode", to(LinkMode), readonly=True) - subnet = ObjectFieldRelated( - "subnet", "Subnet", readonly=True, default=None) + subnet = ObjectFieldRelated("subnet", "Subnet", readonly=True, default=None) ip_address = ObjectField.Checked( - "ip_address", check(str), readonly=True, default=None) + "ip_address", check(str), readonly=True, default=None + ) def __repr__(self): return super(InterfaceLink, self).__repr__( - fields={"mode", "ip_address", "subnet"}) + fields={"mode", "ip_address", "subnet"} + ) async def delete(self): """Delete this interface link.""" - interface = self._data['interface'] + interface = self._data["interface"] data = await interface._handler.unlink_subnet( - system_id=interface.node.system_id, id=interface.id, _id=self.id) - interface._data['links'] = list(data['links']) - interface._orig_data['links'] = copy.deepcopy(interface._data['links']) + system_id=interface.node.system_id, id=interface.id, _id=self.id + ) + interface._data["links"] = list(data["links"]) + interface._orig_data["links"] = copy.deepcopy(interface._data["links"]) async def set_as_default_gateway(self): """Set this link as the default gateway for the node.""" - interface = self._data['interface'] + interface = self._data["interface"] await interface._handler.set_default_gateway( - system_id=interface.node.system_id, id=interface.id, - link_id=self.id) + system_id=interface.node.system_id, id=interface.id, link_id=self.id + ) class InterfaceLinksType(ObjectType): """Metaclass for `InterfaceLinks`.""" async def create( - cls, interface: Interface, mode: LinkMode, - subnet: Union[Subnet, int]=None, ip_address: str=None, - force: bool=False, default_gateway: bool=False): + cls, + interface: Interface, + mode: LinkMode, + subnet: Union[Subnet, int] = None, + ip_address: str = None, + force: bool = False, + default_gateway: bool = False, + ): """ Create a link on `Interface` in MAAS. 
@@ -232,12 +230,10 @@ async def create( """ if not isinstance(interface, Interface): raise TypeError( - "interface must be an Interface, not %s" - % type(interface).__name__) + "interface must be an Interface, not %s" % type(interface).__name__ + ) if not isinstance(mode, LinkMode): - raise TypeError( - "mode must be a LinkMode, not %s" - % type(mode).__name__) + raise TypeError("mode must be a LinkMode, not %s" % type(mode).__name__) if subnet is not None: if isinstance(subnet, Subnet): subnet = subnet.id @@ -245,42 +241,36 @@ async def create( pass else: raise TypeError( - "subnet must be a Subnet or int, not %s" - % type(subnet).__name__) + "subnet must be a Subnet or int, not %s" % type(subnet).__name__ + ) if mode in [LinkMode.AUTO, LinkMode.STATIC]: if subnet is None: - raise ValueError('subnet is required for %s' % mode) + raise ValueError("subnet is required for %s" % mode) if default_gateway and mode not in [LinkMode.AUTO, LinkMode.STATIC]: - raise ValueError('cannot set as default_gateway for %s' % mode) + raise ValueError("cannot set as default_gateway for %s" % mode) params = { - 'system_id': interface.node.system_id, - 'id': interface.id, - 'mode': mode.value, - 'force': force, - 'default_gateway': default_gateway, + "system_id": interface.node.system_id, + "id": interface.id, + "mode": mode.value, + "force": force, + "default_gateway": default_gateway, } if subnet is not None: - params['subnet'] = subnet + params["subnet"] = subnet if ip_address is not None: - params['ip_address'] = ip_address + params["ip_address"] = ip_address # The API doesn't return just the link it returns the whole interface. # Store the link ids before the save to find the addition at the end. 
- link_ids = { - link.id - for link in interface.links - } + link_ids = {link.id for link in interface.links} data = await interface._handler.link_subnet(**params) # Update the links on the interface, except for the newly created link # the `ManagedCreate` wrapper will add that to the interfaces link data # automatically. - new_links = { - link['id']: link - for link in data['links'] - } + new_links = {link["id"]: link for link in data["links"]} links_diff = list(set(new_links.keys()) - link_ids) new_link = new_links.pop(links_diff[0]) - interface._data['links'] = list(new_links.values()) - interface._orig_data['links'] = copy.deepcopy(interface._data['links']) + interface._data["links"] = list(new_links.values()) + interface._orig_data["links"] = copy.deepcopy(interface._data["links"]) return cls._object(new_link) @@ -298,26 +288,35 @@ async def read(cls, node): elif isinstance(node, Node): system_id = node.system_id else: - raise TypeError( - "node must be a Node or str, not %s" - % type(node).__name__) + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) data = await cls._handler.read(system_id=system_id) return cls( - cls._object( - item, local_data={"node_system_id": system_id}) - for item in data) + cls._object(item, local_data={"node_system_id": system_id}) for item in data + ) async def create( - cls, node: Union[Node, str], - interface_type: InterfaceType=InterfaceType.PHYSICAL, *, - name: str=None, mac_address: str=None, tags: Iterable[str]=None, - vlan: Union[Vlan, int]=None, parent: Union[Interface, int]=None, - parents: Iterable[Union[Interface, int]]=None, mtu: int=None, - accept_ra: bool=None, autoconf: bool=None, - bond_mode: str=None, bond_miimon: int=None, - bond_downdelay: int=None, bond_updelay: int=None, - bond_lacp_rate: str=None, bond_xmit_hash_policy: str=None, - bridge_stp: bool=None, bridge_fd: int=None): + cls, + node: Union[Node, str], + interface_type: InterfaceType = InterfaceType.PHYSICAL, + *, + name: str = 
None, + mac_address: str = None, + tags: Iterable[str] = None, + vlan: Union[Vlan, int] = None, + parent: Union[Interface, int] = None, + parents: Iterable[Union[Interface, int]] = None, + mtu: int = None, + accept_ra: bool = None, + autoconf: bool = None, + bond_mode: str = None, + bond_miimon: int = None, + bond_downdelay: int = None, + bond_updelay: int = None, + bond_lacp_rate: str = None, + bond_xmit_hash_policy: str = None, + bridge_stp: bool = None, + bridge_fd: int = None + ): """ Create a `Interface` in MAAS. @@ -327,7 +326,7 @@ async def create( :type interface_type: `InterfaceType` :param name: The name for the interface (optional). :type name: `str` - :param tags: List of tags to add to the machine. + :param tags: List of tags to add to the interface. :type tags: sequence of `str` :param mtu: The MTU for the interface (optional). :type mtu: `int` @@ -391,81 +390,80 @@ async def create( """ params = {} if isinstance(node, str): - params['system_id'] = node + params["system_id"] = node elif isinstance(node, Node): - params['system_id'] = node.system_id + params["system_id"] = node.system_id else: raise TypeError( - 'node must be a Node or str, not %s' % ( - type(node).__name__)) + "node must be a Node or str, not %s" % (type(node).__name__) + ) if name is not None: - params['name'] = name + params["name"] = name if tags is not None: - params['tags'] = tags + params["tags"] = tags if mtu is not None: - params['mtu'] = mtu + params["mtu"] = mtu if vlan is not None: if isinstance(vlan, Vlan): - params['vlan'] = vlan.id + params["vlan"] = vlan.id elif isinstance(vlan, int): - params['vlan'] = vlan + params["vlan"] = vlan else: raise TypeError( - 'vlan must be a Vlan or int, not %s' % ( - type(vlan).__name__)) + "vlan must be a Vlan or int, not %s" % (type(vlan).__name__) + ) if accept_ra is not None: - params['accept_ra'] = accept_ra + params["accept_ra"] = accept_ra if autoconf is not None: - params['autoconf'] = autoconf + params["autoconf"] = autoconf 
handler = None if not isinstance(interface_type, InterfaceType): raise TypeError( - 'interface_type must be an InterfaceType, not %s' % ( - type(interface_type).__name__)) + "interface_type must be an InterfaceType, not %s" + % (type(interface_type).__name__) + ) if interface_type == InterfaceType.PHYSICAL: handler = cls._handler.create_physical if mac_address: - params['mac_address'] = mac_address + params["mac_address"] = mac_address else: - raise ValueError( - 'mac_address required for physical interface') + raise ValueError("mac_address required for physical interface") elif interface_type == InterfaceType.BOND: handler = cls._handler.create_bond if parent is not None: raise ValueError("use parents not parent for bond interface") if not isinstance(parents, Iterable): raise TypeError( - 'parents must be a iterable, not %s' % ( - type(parents).__name__)) + "parents must be a iterable, not %s" % (type(parents).__name__) + ) if len(parents) == 0: - raise ValueError( - 'at least one parent required for bond interface') - params['parents'] = list(gen_parents(parents)) + raise ValueError("at least one parent required for bond interface") + params["parents"] = list(gen_parents(parents)) if not name: - raise ValueError('name is required for bond interface') + raise ValueError("name is required for bond interface") if mac_address is not None: - params['mac_address'] = mac_address + params["mac_address"] = mac_address if bond_mode is not None: - params['bond_mode'] = bond_mode + params["bond_mode"] = bond_mode if bond_miimon is not None: - params['bond_miimon'] = bond_miimon + params["bond_miimon"] = bond_miimon if bond_downdelay is not None: - params['bond_downdelay'] = bond_downdelay + params["bond_downdelay"] = bond_downdelay if bond_updelay is not None: - params['bond_updelay'] = bond_updelay + params["bond_updelay"] = bond_updelay if bond_lacp_rate is not None: - params['bond_lacp_rate'] = bond_lacp_rate + params["bond_lacp_rate"] = bond_lacp_rate if 
bond_xmit_hash_policy is not None: - params['bond_xmit_hash_policy'] = bond_xmit_hash_policy + params["bond_xmit_hash_policy"] = bond_xmit_hash_policy elif interface_type == InterfaceType.VLAN: handler = cls._handler.create_vlan if parents is not None: raise ValueError("use parent not parents for VLAN interface") if parent is None: raise ValueError("parent is required for VLAN interface") - params['parent'] = get_parent(parent) + params["parent"] = get_parent(parent) if vlan is None: raise ValueError("vlan is required for VLAN interface") elif interface_type == InterfaceType.BRIDGE: @@ -474,18 +472,17 @@ async def create( raise ValueError("use parent not parents for bridge interface") if parent is None: raise ValueError("parent is required for bridge interface") - params['parent'] = get_parent(parent) + params["parent"] = get_parent(parent) if not name: - raise ValueError('name is required for bridge interface') + raise ValueError("name is required for bridge interface") if mac_address is not None: - params['mac_address'] = mac_address + params["mac_address"] = mac_address if bridge_stp is not None: - params['bridge_stp'] = bridge_stp + params["bridge_stp"] = bridge_stp if bridge_fd is not None: - params['bridge_fd'] = bridge_fd + params["bridge_fd"] = bridge_fd else: - raise ValueError( - "cannot create an interface of type: %s" % interface_type) + raise ValueError("cannot create an interface of type: %s" % interface_type) return cls._object(await handler(**params)) @@ -495,12 +492,9 @@ class Interfaces(ObjectSet, metaclass=InterfacesType): @property def by_name(self): - """Return mapping of name of interface to interface object.""" - return { - interface.name: interface - for interface in self - } + """Return mapping of name of interface to `Interface`.""" + return {interface.name: interface for interface in self} def get_by_name(self, name): - """Return an interface by its name.""" + """Return an `Interface` by its name.""" return self.by_name[name] diff --git 
a/maas/client/viscera/ip_addresses.py b/maas/client/viscera/ip_addresses.py new file mode 100644 index 00000000..5997e76e --- /dev/null +++ b/maas/client/viscera/ip_addresses.py @@ -0,0 +1,54 @@ +"""Objects for ipaddresses.""" + +__all__ = ["IPAddress", "IPAddresses"] + +from . import ( + check, + parse_timestamp, + Object, + ObjectField, + ObjectSet, + ObjectType, + ObjectFieldRelatedSet, + ObjectFieldRelated, + OriginObjectRef, +) + + +class IPAddressType(ObjectType): + """Metaclass for `IPAddresses`.""" + + async def read(cls): + data = await cls._handler.read() + return cls(map(cls._object, data)) + + +class IPAddresses(ObjectSet, metaclass=IPAddressType): + """The set of ipaddresses stored in MAAS.""" + + _object = OriginObjectRef(name="IPAddress") + + +class IPAddress(Object): + """An ipaddress stored in MAAS.""" + + alloc_type = ObjectField.Checked("alloc_type", check(int), check(int)) + alloc_type_name = ObjectField.Checked("alloc_type_name", check(str), check(str)) + created = ObjectField.Checked("created", parse_timestamp, readonly=True) + ip = ObjectField.Checked("ip", check(str)) + owner = ObjectFieldRelated("owner", "User") + interface_set = ObjectFieldRelatedSet("interface_set", "Interfaces") + subnet = ObjectFieldRelated("subnet", "Subnet", readonly=True, default=None) + + def __repr__(self): + return super(IPAddress, self).__repr__( + fields={ + "alloc_type", + "alloc_type_name", + "created", + "ip", + "owner", + "interface_set", + "subnet", + } + ) diff --git a/maas/client/viscera/ipranges.py b/maas/client/viscera/ipranges.py index 8ccdc7e6..3049eefa 100644 --- a/maas/client/viscera/ipranges.py +++ b/maas/client/viscera/ipranges.py @@ -1,19 +1,8 @@ """Objects for ipranges.""" -__all__ = [ - "IPRanges", - "IPRange", -] - -from . import ( - check, - Object, - ObjectField, - ObjectFieldRelated, - ObjectSet, - ObjectType, - to, -) +__all__ = ["IPRanges", "IPRange"] + +from . 
import check, Object, ObjectField, ObjectFieldRelated, ObjectSet, ObjectType, to from .subnets import Subnet from ..enum import IPRangeType from typing import Union @@ -29,9 +18,14 @@ async def read(cls): return cls(map(cls._object, data)) async def create( - cls, start_ip: str, end_ip: str, *, - type: IPRangeType=IPRangeType.RESERVED, - comment: str=None, subnet: Union[Subnet, int]=None): + cls, + start_ip: str, + end_ip: str, + *, + type: IPRangeType = IPRangeType.RESERVED, + comment: str = None, + subnet: Union[Subnet, int] = None + ): """ Create a `IPRange` in MAAS. @@ -50,11 +44,10 @@ async def create( :returns: The created IPRange :rtype: `IPRange` """ - params = { - 'start_ip': start_ip, - 'end_ip': end_ip, - 'type': type, - } + if not isinstance(type, IPRangeType): + raise TypeError("type must be an IPRangeType, not %s" % TYPE(type).__name__) + + params = {"start_ip": start_ip, "end_ip": end_ip, "type": type.value} if comment is not None: params["comment"] = comment if subnet is not None: @@ -64,8 +57,8 @@ async def create( params["subnet"] = subnet else: raise TypeError( - "subnet must be Subnet or int, not %s" % ( - TYPE(subnet).__class__)) + "subnet must be Subnet or int, not %s" % (TYPE(subnet).__class__) + ) return cls._object(await cls._handler.create(**params)) @@ -73,7 +66,7 @@ class IPRanges(ObjectSet, metaclass=IPRangesType): """The set of IPRanges stored in MAAS.""" -class IPRangeType(ObjectType): +class IPRangeTypeMeta(ObjectType): """Metaclass for `IPRange`.""" async def read(cls, id: int): @@ -82,19 +75,14 @@ async def read(cls, id: int): return cls(data) -class IPRange(Object, metaclass=IPRangeType): +class IPRange(Object, metaclass=IPRangeTypeMeta): """A IPRange.""" - id = ObjectField.Checked( - "id", check(int), readonly=True, pk=True) - start_ip = ObjectField.Checked( - "start_ip", check(str)) - end_ip = ObjectField.Checked( - "end_ip", check(str)) - type = ObjectField.Checked( - "type", to(IPRangeType), readonly=True) - comment = 
ObjectField.Checked( - "comment", check(str)) + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + start_ip = ObjectField.Checked("start_ip", check(str)) + end_ip = ObjectField.Checked("end_ip", check(str)) + type = ObjectField.Checked("type", to(IPRangeType), readonly=True) + comment = ObjectField.Checked("comment", check(str)) subnet = ObjectFieldRelated("subnet", "Subnet", readonly=True, pk=0) async def delete(self): diff --git a/maas/client/viscera/logical_volumes.py b/maas/client/viscera/logical_volumes.py new file mode 100644 index 00000000..d7416c2b --- /dev/null +++ b/maas/client/viscera/logical_volumes.py @@ -0,0 +1,94 @@ +"""Objects for logical volumes.""" + +__all__ = ["LogicalVolume", "LogicalVolumes"] + +from typing import Iterable + +from .block_devices import ( + BlockDevice, + BlockDevices, + BlockDevicesType, + BlockDeviceTypeMeta, +) +from .volume_groups import VolumeGroup +from ..utils import remove_None + + +class LogicalVolumesType(BlockDevicesType): + """Metaclass for `LogicalVolumes`.""" + + def bind(cls, origin, handler, handlers, *, name=None): + # LogicalVolumes is just a wrapper over BlockDevices. So the + # `BlockDevices` handler is binded instead of an empty handler. + handler = handlers.get("BlockDevices") + return super(LogicalVolumesType, cls).bind(origin, handler, handlers) + + async def create( + cls, + volume_group: VolumeGroup, + name: str, + size: int, + *, + uuid: str = None, + tags: Iterable[str] = None + ): + """ + Create a logical volume on the volume group. + + :param volume_group: Volume group to create the logical volume on. + :type node: `VolumeGroup` + :param name: The name for the logical volume. + :type name: `str` + :param size: The size of the logical volume in bytes. + :type size: `int` + :param uuid: UUID of the logical volume. + :type uuid: `str` + :param tags: List of tags to add to the logical volume. 
+ :type tags: sequence of `str` + """ + if not isinstance(volume_group, VolumeGroup): + raise TypeError( + "volume_group must be a VolumeGroup, not %s" + % (type(volume_group).__name__) + ) + + params = {"system_id": volume_group.node.system_id, "id": volume_group.id} + if not name: + raise ValueError("name must be provided.") + if not size or size < 0: + raise ValueError("size must be provided and greater than zero.") + + params.update(remove_None({"name": name, "size": size, "uuid": uuid})) + data = await volume_group._handler.create_logical_volume(**params) + # Create logical volume doesn't return a full block device object. + # Load the logical volume using the block device endpoint, ensures that + # all the data present to access the fields. + bd_handler = getattr(cls._origin, "BlockDevice")._handler + volume = cls._object( + await bd_handler.read(system_id=data["system_id"], id=data["id"]) + ) + if tags: + volume.tags = tags + await volume.save() + return volume + + +class LogicalVolumes(BlockDevices, metaclass=LogicalVolumesType): + """The set of logical volumes on a volume group.""" + + +class LogicalVolumeType(BlockDeviceTypeMeta): + """Metaclass for `LogicalVolume`.""" + + def bind(cls, origin, handler, handlers, *, name=None): + # LogicalVolume is just a wrapper over BlockDevice. So the + # `BlockDevice` handler is binded instead of an empty handler. 
+ handler = handlers.get("BlockDevice") + return super(LogicalVolumeType, cls).bind(origin, handler, handlers) + + +class LogicalVolume(BlockDevice, metaclass=LogicalVolumeType): + """A logical volume on a volume group.""" + + def __repr__(self): + return super(BlockDevice, self).__repr__(name="LogicalVolume", fields={"name"}) diff --git a/maas/client/viscera/maas.py b/maas/client/viscera/maas.py index a7943d08..e9604333 100644 --- a/maas/client/viscera/maas.py +++ b/maas/client/viscera/maas.py @@ -1,17 +1,12 @@ """Objects for MAASs.""" -__all__ = [ - "MAAS", -] +__all__ = ["MAAS"] import enum import re import typing -from . import ( - Object, - ObjectType, -) +from . import Object, ObjectType from ..bones import CallError @@ -50,9 +45,7 @@ def lookup(cls, parameter): if member.parameter == parameter: return member else: - raise KeyError( - "%s value %r not recognised." - % (cls.__name__, parameter)) + raise KeyError("%s value %r not recognised." % (cls.__name__, parameter)) class MAASType(ObjectType): @@ -173,13 +166,13 @@ async def get_upstream_dns(cls) -> list: server config. """ data = await cls.get_config("upstream_dns") - return [] if data is None else re.split("[,\s]+", data) + return [] if data is None else re.split(r"[,\s]+", data) - async def set_upstream_dns( - cls, addresses: typing.Optional[typing.Sequence[str]]): + async def set_upstream_dns(cls, addresses: typing.Optional[typing.Sequence[str]]): """See `get_upstream_dns`.""" - await cls.set_config("upstream_dns", ( - "" if addresses is None else",".join(addresses))) + await cls.set_config( + "upstream_dns", ("" if addresses is None else ",".join(addresses)) + ) class DNSSEC(DescriptiveEnum): """DNSSEC validation settings. 
@@ -222,8 +215,7 @@ async def get_enable_disk_erasing_on_release(cls) -> bool: async def set_enable_disk_erasing_on_release(cls, erase: bool): """Should nodes' disks be erased prior to releasing.""" - await cls.set_config( - "enable_disk_erasing_on_release", _django_boolean(erase)) + await cls.set_config("enable_disk_erasing_on_release", _django_boolean(erase)) async def get_windows_kms_host(cls) -> typing.Optional[str]: """Windows KMS activation host. @@ -288,7 +280,8 @@ async def get_default_min_hwe_kernel(cls) -> typing.Optional[str]: async def set_default_min_hwe_kernel(cls, version: typing.Optional[str]): """See `get_default_min_hwe_kernel`.""" await cls.set_config( - "default_min_hwe_kernel", "" if version is None else version) + "default_min_hwe_kernel", "" if version is None else version + ) async def get_enable_third_party_drivers(cls) -> bool: """Enable the installation of proprietary drivers, e.g. HPVSA.""" @@ -296,8 +289,7 @@ async def get_enable_third_party_drivers(cls) -> bool: async def set_enable_third_party_drivers(cls, enabled: bool): """See `get_enable_third_party_drivers`.""" - await cls.set_config( - "enable_third_party_drivers", _django_boolean(enabled)) + await cls.set_config("enable_third_party_drivers", _django_boolean(enabled)) async def get_config(cls, name: str): """Get a configuration value from MAAS. @@ -318,11 +310,13 @@ async def set_config(cls, name: str, value): async def _roundtrip(cls): """Testing helper: gets each value and sets it again.""" getters = { - name[4:]: getattr(cls, name) for name in dir(cls) + name[4:]: getattr(cls, name) + for name in dir(cls) if name.startswith("get_") and name != "get_config" } setters = { - name[4:]: getattr(cls, name) for name in dir(cls) + name[4:]: getattr(cls, name) + for name in dir(cls) if name.startswith("set_") and name != "set_config" } @@ -340,9 +334,7 @@ async def _roundtrip(cls): else: value2 = await getter() if value2 != value: - print( - "!!! 
Round-trip failed:", repr(value), - "-->", repr(value2)) + print("!!! Round-trip failed:", repr(value), "-->", repr(value2)) class MAAS(Object, metaclass=MAASType): diff --git a/maas/client/viscera/machines.py b/maas/client/viscera/machines.py index deefaf9d..f9ac96dc 100644 --- a/maas/client/viscera/machines.py +++ b/maas/client/viscera/machines.py @@ -1,13 +1,11 @@ """Objects for machines.""" -__all__ = [ - "Machine", - "Machines", -] +__all__ = ["Machine", "Machines"] import asyncio import base64 import bson +import json from http import HTTPStatus import typing @@ -16,29 +14,18 @@ check_optional, ObjectField, ObjectFieldRelated, + ObjectFieldRelatedSet, to, ) from .fabrics import Fabric from .interfaces import Interface -from .nodes import ( - Node, - Nodes, - NodesType, - NodeTypeMeta, -) +from .nodes import Node, Nodes, NodesType, NodeTypeMeta +from .pods import Pod from .subnets import Subnet from .zones import Zone from ..bones import CallError -from ..enum import ( - NodeStatus, - PowerState, - PowerStopMode -) -from ..errors import ( - MAASException, - OperationNotAllowed, - PowerError -) +from ..enum import NodeStatus, PowerState, PowerStopMode +from ..errors import MAASException, OperationNotAllowed, PowerError from ..utils import remove_None from ..utils.diff import calculate_dict_diff @@ -49,7 +36,7 @@ ZoneParam = typing.Union[str, Zone] -def get_param_arg(param, idx, klass, arg, attr='id'): +def get_param_arg(param, idx, klass, arg, attr="id"): """Return the correct value for a fabric from `arg`.""" if isinstance(arg, klass): return getattr(arg, attr) @@ -57,19 +44,26 @@ def get_param_arg(param, idx, klass, arg, attr='id'): return arg else: raise TypeError( - "%s[%d] must be int, str, or %s, not %s" % ( - param, idx, klass.__name__, type(arg).__name__)) + "%s[%d] must be int, str, or %s, not %s" + % (param, idx, klass.__name__, type(arg).__name__) + ) class MachinesType(NodesType): """Metaclass for `Machines`.""" async def create( - cls, 
architecture: str, mac_addresses: typing.Sequence[str], - power_type: str, - power_parameters: typing.Mapping[str, typing.Any]=None, *, - subarchitecture: str=None, min_hwe_kernel: str=None, - hostname: str=None, domain: typing.Union[int, str]=None): + cls, + architecture: str, + mac_addresses: typing.Sequence[str], + power_type: str, + power_parameters: typing.Mapping[str, typing.Any] = None, + *, + subarchitecture: str = None, + min_hwe_kernel: str = None, + hostname: str = None, + domain: typing.Union[int, str] = None + ): """ Create a new machine. @@ -97,7 +91,7 @@ async def create( "power_type": power_type, } if power_parameters is not None: - params["power_parameters"] = power_parameters + params["power_parameters"] = json.dumps(power_parameters, sort_keys=True) if subarchitecture is not None: params["subarchitecture"] = subarchitecture if min_hwe_kernel is not None: @@ -109,26 +103,34 @@ async def create( return cls._object(await cls._handler.create(**params)) async def allocate( - cls, *, - hostname: str=None, - architectures: typing.Sequence[str]=None, - cpus: int=None, - fabrics: typing.Sequence[FabricParam]=None, - interfaces: typing.Sequence[InterfaceParam]=None, - memory: float=None, - pod: str=None, - pod_type: str=None, - storage: typing.Sequence[str]=None, - subnets: typing.Sequence[SubnetParam]=None, - tags: typing.Sequence[str]=None, - zone: typing.Union[str, Zone]=None, - not_fabrics: typing.Sequence[FabricParam]=None, - not_subnets: typing.Sequence[SubnetParam]=None, - not_tags: typing.Sequence[str]=None, - not_zones: typing.Sequence[ZoneParam]=None, - agent_name: str=None, comment: str=None, - bridge_all: bool=None, bridge_stp: bool=None, bridge_fd: int=None, - dry_run: bool=None, verbose: bool=None): + cls, + *, + hostname: str = None, + architectures: typing.Sequence[str] = None, + cpus: int = None, + fabrics: typing.Sequence[FabricParam] = None, + interfaces: typing.Sequence[InterfaceParam] = None, + memory: float = None, + pod: 
typing.Union[str, Pod] = None, + not_pod: typing.Union[str, Pod] = None, + pod_type: str = None, + not_pod_type: str = None, + storage: typing.Sequence[str] = None, + subnets: typing.Sequence[SubnetParam] = None, + tags: typing.Sequence[str] = None, + zone: typing.Union[str, Zone] = None, + not_fabrics: typing.Sequence[FabricParam] = None, + not_subnets: typing.Sequence[SubnetParam] = None, + not_tags: typing.Sequence[str] = None, + not_zones: typing.Sequence[ZoneParam] = None, + agent_name: str = None, + comment: str = None, + bridge_all: bool = None, + bridge_stp: bool = None, + bridge_fd: int = None, + dry_run: bool = None, + verbose: bool = None + ): """ Allocate a machine. @@ -146,8 +148,12 @@ async def allocate( :type memory: `int` :param pod: The pod to allocate the machine from. :type pod: `str` + :param not_pod: Pod the machine must not be located in. + :type not_pod: `str` :param pod_type: The type of pod to allocate the machine from. :type pod_type: `str` + :param not_pod_type: Pod type the machine must not be located in. + :type not_pod_type: `str` :param subnets: The subnet(s) the desired machine must be linked to. :type subnets: sequence of `str` or `int` or `Subnet` :param storage: The storage contraint to match. @@ -185,37 +191,55 @@ async def allocate( which machine(s) matched). 
:type verbose: `bool` """ - params = remove_None({ - 'name': hostname, - 'arch': architectures, - 'cpu_count': str(cpus) if cpus else None, - 'mem': str(memory) if memory else None, - 'pod': pod, - 'pod_type': pod_type, - 'storage': storage, - 'tags': tags, - 'not_tags': not_tags, - 'agent_name': agent_name, - 'comment': comment, - 'bridge_all': bridge_all, - 'bridge_stp': bridge_stp, - 'bridge_fd': bridge_fd, - 'dry_run': dry_run, - 'verbose': verbose, - }) + params = remove_None( + { + "name": hostname, + "arch": architectures, + "cpu_count": str(cpus) if cpus else None, + "mem": str(memory) if memory else None, + "pod_type": pod_type, + "not_pod_type": not_pod_type, + "storage": storage, + "tags": tags, + "not_tags": not_tags, + "agent_name": agent_name, + "comment": comment, + "bridge_all": bridge_all, + "bridge_stp": bridge_stp, + "bridge_fd": bridge_fd, + "dry_run": dry_run, + "verbose": verbose, + } + ) if fabrics is not None: params["fabrics"] = [ - get_param_arg('fabrics', idx, Fabric, fabric) + get_param_arg("fabrics", idx, Fabric, fabric) for idx, fabric in enumerate(fabrics) ] if interfaces is not None: params["interfaces"] = [ - get_param_arg('interfaces', idx, Interface, nic) + get_param_arg("interfaces", idx, Interface, nic) for idx, nic in enumerate(interfaces) ] + if pod is not None: + if isinstance(pod, Pod): + params["pod"] = pod.name + elif isinstance(pod, str): + params["pod"] = pod + else: + raise TypeError("pod must be a str or Pod, not %s" % type(pod).__name__) + if not_pod is not None: + if isinstance(not_pod, Pod): + params["not_pod"] = not_pod.name + elif isinstance(not_pod, str): + params["not_pod"] = not_pod + else: + raise TypeError( + "not_pod must be a str or Pod, not %s" % type(not_pod).__name__ + ) if subnets is not None: params["subnets"] = [ - get_param_arg('subnets', idx, Subnet, subnet) + get_param_arg("subnets", idx, Subnet, subnet) for idx, subnet in enumerate(subnets) ] if zone is not None: @@ -225,20 +249,21 @@ async def 
allocate( params["zone"] = zone else: raise TypeError( - "zone must be a str or Zone, not %s" % type(zone).__name__) + "zone must be a str or Zone, not %s" % type(zone).__name__ + ) if not_fabrics is not None: params["not_fabrics"] = [ - get_param_arg('not_fabrics', idx, Fabric, fabric) + get_param_arg("not_fabrics", idx, Fabric, fabric) for idx, fabric in enumerate(not_fabrics) ] if not_subnets is not None: params["not_subnets"] = [ - get_param_arg('not_subnets', idx, Subnet, subnet) + get_param_arg("not_subnets", idx, Subnet, subnet) for idx, subnet in enumerate(not_subnets) ] if not_zones is not None: params["not_in_zones"] = [ - get_param_arg('not_zones', idx, Zone, zone, attr='name') + get_param_arg("not_zones", idx, Zone, zone, attr="name") for idx, zone in enumerate(not_zones) ] try: @@ -252,8 +277,7 @@ async def allocate( else: return cls._object(data) - async def get_power_parameters_for( - cls, system_ids: typing.Sequence[str]): + async def get_power_parameters_for(cls, system_ids: typing.Sequence[str]): """ Get a list of power parameters for specified systems. 
*WARNING*: This method is considered 'alpha' and may be modified @@ -316,100 +340,83 @@ class Machine(Node, metaclass=MachineType): """A machine stored in MAAS.""" architecture = ObjectField.Checked( - "architecture", check_optional(str), check_optional(str)) - boot_disk = ObjectField.Checked( - "boot_disk", check_optional(str), check_optional(str)) - cpus = ObjectField.Checked( - "cpu_count", check(int), check(int)) - disable_ipv4 = ObjectField.Checked( - "disable_ipv4", check(bool), check(bool)) - distro_series = ObjectField.Checked( - "distro_series", check(str), readonly=True) + "architecture", check_optional(str), check_optional(str) + ) + boot_disk = ObjectFieldRelated("boot_disk", "BlockDevice", readonly=True) + boot_interface = ObjectFieldRelated("boot_interface", "Interface", readonly=True) + block_devices = ObjectFieldRelatedSet( + "blockdevice_set", "BlockDevices", reverse=None + ) + bcaches = ObjectFieldRelatedSet("bcaches", "Bcaches", reverse=None) + cache_sets = ObjectFieldRelatedSet("cache_sets", "BcacheCacheSets", reverse=None) + cpus = ObjectField.Checked("cpu_count", check(int), check(int)) + disable_ipv4 = ObjectField.Checked("disable_ipv4", check(bool), check(bool)) + distro_series = ObjectField.Checked("distro_series", check(str), readonly=True) hwe_kernel = ObjectField.Checked( - "hwe_kernel", check_optional(str), check_optional(str)) - memory = ObjectField.Checked( - "memory", check(int), check(int)) + "hwe_kernel", check_optional(str), check_optional(str) + ) + locked = ObjectField.Checked("locked", check(bool), readonly=True) + memory = ObjectField.Checked("memory", check(int), check(int)) min_hwe_kernel = ObjectField.Checked( - "min_hwe_kernel", check_optional(str), check_optional(str)) - osystem = ObjectField.Checked( - "osystem", check(str), readonly=True) - owner_data = ObjectField.Checked( - "owner_data", check(dict), check(dict)) - - boot_interface = ObjectFieldRelated( - "boot_interface", "Interface", readonly=True) - - # 
blockdevice_set - # macaddress_set - # netboot - # physicalblockdevice_set - - power_state = ObjectField.Checked( - "power_state", to(PowerState), readonly=True) - power_type = ObjectField.Checked( - "power_type", check(str), readonly=True) - - # pxe_mac - # resource_uri - # routers - # status - # storage - - status = ObjectField.Checked( - "status", to(NodeStatus), readonly=True) + "min_hwe_kernel", check_optional(str), check_optional(str) + ) + netboot = ObjectField.Checked("netboot", check(bool), readonly=True) + osystem = ObjectField.Checked("osystem", check(str), readonly=True) + owner_data = ObjectField.Checked("owner_data", check(dict), check(dict)) + status = ObjectField.Checked("status", to(NodeStatus), readonly=True) status_action = ObjectField.Checked( - "status_action", check_optional(str), readonly=True) + "status_action", check_optional(str), readonly=True + ) status_message = ObjectField.Checked( - "status_message", check_optional(str), readonly=True) - status_name = ObjectField.Checked( - "status_name", check(str), readonly=True) - - # swap_size - # virtualblockdevice_set + "status_message", check_optional(str), readonly=True + ) + status_name = ObjectField.Checked("status_name", check(str), readonly=True) + raids = ObjectFieldRelatedSet("raids", "Raids", reverse=None) + volume_groups = ObjectFieldRelatedSet("volume_groups", "VolumeGroups", reverse=None) + pod = ObjectFieldRelated("pod", "Pod", readonly=True) async def save(self): """Save the machine in MAAS.""" - orig_owner_data = self._orig_data['owner_data'] - new_owner_data = dict(self._data['owner_data']) - self._changed_data.pop('owner_data', None) + orig_owner_data = self._orig_data["owner_data"] + new_owner_data = dict(self._data["owner_data"]) + self._changed_data.pop("owner_data", None) await super(Machine, self).save() params_diff = calculate_dict_diff(orig_owner_data, new_owner_data) if len(params_diff) > 0: - params_diff['system_id'] = self.system_id + params_diff["system_id"] = 
self.system_id await self._handler.set_owner_data(**params_diff) - self._data['owner_data'] = self._data['owner_data'] - - async def get_power_parameters(self): - """Get the power paramters for this machine.""" - data = await self._handler.power_parameters(system_id=self.system_id) - return data + self._data["owner_data"] = self._data["owner_data"] - async def abort(self, *, comment: str=None): + async def abort(self, *, comment: str = None): """Abort the current action. :param comment: Reason for aborting the action. :param type: `str` """ - params = { - "system_id": self.system_id - } + params = {"system_id": self.system_id} if comment: params["comment"] = comment - self._data = await self._handler.abort(**params) + self._reset(await self._handler.abort(**params)) return self async def clear_default_gateways(self): """Clear default gateways.""" - self._data = await self._handler.clear_default_gateways( - system_id=self.system_id) + self._reset( + await self._handler.clear_default_gateways(system_id=self.system_id) + ) return self async def commission( - self, *, enable_ssh: bool=None, skip_networking: bool=None, - skip_storage: bool=None, - commissioning_scripts: typing.Sequence[str]=None, - testing_scripts: typing.Sequence[str]=None, - wait: bool=False, wait_interval: int=5): + self, + *, + enable_ssh: bool = None, + skip_networking: bool = None, + skip_storage: bool = None, + commissioning_scripts: typing.Sequence[str] = None, + testing_scripts: typing.Sequence[str] = None, + wait: bool = False, + wait_interval: int = 5 + ): """Commission this machine. 
:param enable_ssh: Prevent the machine from powering off after running @@ -441,37 +448,42 @@ async def commission( params["skip_networking"] = skip_networking if skip_storage is not None: params["skip_storage"] = skip_storage - if (commissioning_scripts is not None and - len(commissioning_scripts) > 0): + if commissioning_scripts is not None and len(commissioning_scripts) > 0: params["commissioning_scripts"] = ",".join(commissioning_scripts) if testing_scripts is not None: - if len(testing_scripts) == 0: - params["testing_scripts"] = "" + if len(testing_scripts) == 0 or testing_scripts == "none": + params["testing_scripts"] = ["none"] else: params["testing_scripts"] = ",".join(testing_scripts) - self._data = await self._handler.commission(**params) + self._reset(await self._handler.commission(**params)) if not wait: return self else: # Wait for the machine to be fully commissioned. - while self.status in [ - NodeStatus.COMMISSIONING, NodeStatus.TESTING]: + while self.status in [NodeStatus.COMMISSIONING, NodeStatus.TESTING]: await asyncio.sleep(wait_interval) - self._data = await self._handler.read(system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) if self.status == NodeStatus.FAILED_COMMISSIONING: - msg = "{hostname} failed to commission.".format( - hostname=self.hostname) + msg = "{hostname} failed to commission.".format(hostname=self.hostname) raise FailedCommissioning(msg, self) if self.status == NodeStatus.FAILED_TESTING: - msg = "{hostname} failed testing.".format( - hostname=self.hostname) + msg = "{hostname} failed testing.".format(hostname=self.hostname) raise FailedTesting(msg, self) return self async def deploy( - self, *, user_data: typing.Union[bytes, str]=None, - distro_series: str=None, hwe_kernel: str=None, comment: str=None, - wait: bool=False, wait_interval: int=5): + self, + *, + user_data: typing.Union[bytes, str] = None, + distro_series: str = None, + hwe_kernel: str = None, + comment: str = None, + wait: 
bool = False, + install_kvm: bool = False, + wait_interval: int = 5, + ephemeral_deploy: bool = False, + enable_hw_sync: bool = False + ): """Deploy this machine. :param user_data: User-data to provide to the machine when booting. If @@ -484,8 +496,14 @@ async def deploy( :param comment: A comment for the event log. :param wait: If specified, wait until the deploy is complete. :param wait_interval: How often to poll, defaults to 5 seconds + :param ephemeral_deploy: Deploy a machine in Ephemeral mode + :param enable_hw_sync: Enables periodic hardware sync """ params = {"system_id": self.system_id} + + if install_kvm: + params["install_kvm"] = install_kvm + if user_data is not None: if isinstance(user_data, bytes): params["user_data"] = base64.encodebytes(user_data) @@ -499,22 +517,25 @@ async def deploy( params["hwe_kernel"] = hwe_kernel if comment is not None: params["comment"] = comment - self._data = await self._handler.deploy(**params) + if ephemeral_deploy: + params["ephemeral_deploy"] = ephemeral_deploy + if enable_hw_sync: + params["enable_hw_sync"] = enable_hw_sync + + self._reset(await self._handler.deploy(**params)) if not wait: return self else: # Wait for the machine to be fully deployed while self.status == NodeStatus.DEPLOYING: await asyncio.sleep(wait_interval) - self._data = await self._handler.read(system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) if self.status == NodeStatus.FAILED_DEPLOYMENT: - msg = "{hostname} failed to deploy.".format( - hostname=self.hostname - ) + msg = "{hostname} failed to deploy.".format(hostname=self.hostname) raise FailedDeployment(msg, self) return self - async def enter_rescue_mode(self, wait: bool=False, wait_interval: int=5): + async def enter_rescue_mode(self, wait: bool = False, wait_interval: int = 5): """ Send this machine into 'rescue mode'. 
@@ -522,8 +543,7 @@ async def enter_rescue_mode(self, wait: bool=False, wait_interval: int=5): :param wait_interval: How often to poll, defaults to 5 seconds """ try: - self._data = await self._handler.rescue_mode( - system_id=self.system_id) + self._reset(await self._handler.rescue_mode(system_id=self.system_id)) except CallError as error: if error.status == HTTPStatus.FORBIDDEN: message = "Not allowed to enter rescue mode" @@ -537,7 +557,7 @@ async def enter_rescue_mode(self, wait: bool=False, wait_interval: int=5): # Wait for machine to finish entering rescue mode while self.status == NodeStatus.ENTERING_RESCUE_MODE: await asyncio.sleep(wait) - self._data = await self._handler.read(system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) if self.status == NodeStatus.FAILED_ENTERING_RESCUE_MODE: msg = "{hostname} failed to enter rescue mode.".format( hostname=self.hostname @@ -545,7 +565,7 @@ async def enter_rescue_mode(self, wait: bool=False, wait_interval: int=5): raise RescueModeFailure(msg, self) return self - async def exit_rescue_mode(self, wait: bool=False, wait_interval: int=5): + async def exit_rescue_mode(self, wait: bool = False, wait_interval: int = 5): """ Exit rescue mode. @@ -553,9 +573,7 @@ async def exit_rescue_mode(self, wait: bool=False, wait_interval: int=5): :param wait_interval: How often to poll, defaults to 5 seconds """ try: - self._data = await self._handler.exit_rescue_mode( - system_id=self.system_id - ) + self._reset(await self._handler.exit_rescue_mode(system_id=self.system_id)) except CallError as error: if error.status == HTTPStatus.FORBIDDEN: message = "Not allowed to exit rescue mode." 
@@ -568,7 +586,7 @@ async def exit_rescue_mode(self, wait: bool=False, wait_interval: int=5): # Wait for machine to finish exiting rescue mode while self.status == NodeStatus.EXITING_RESCUE_MODE: await asyncio.sleep(wait_interval) - self._data = await self._handler.read(system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) if self.status == NodeStatus.FAILED_EXITING_RESCUE_MODE: msg = "{hostname} failed to exit rescue mode.".format( hostname=self.hostname @@ -590,40 +608,42 @@ async def get_details(self): :returns: Mapping of hardware details. """ data = await self._handler.details(system_id=self.system_id) - return bson.loads(data) + return bson.decode_all(data)[0] - async def mark_broken(self, *, comment: str=None): + async def mark_broken(self, *, comment: str = None): """Mark broken. :param comment: Reason machine is broken. :type comment: `str` """ - params = { - "system_id": self.system_id - } + params = {"system_id": self.system_id} if comment: params["comment"] = comment - self._data = await self._handler.mark_broken(**params) + self._reset(await self._handler.mark_broken(**params)) return self - async def mark_fixed(self, *, comment: str=None): + async def mark_fixed(self, *, comment: str = None): """Mark fixes. :param comment: Reason machine is fixed. :type comment: `str` """ - params = { - "system_id": self.system_id - } + params = {"system_id": self.system_id} if comment: params["comment"] = comment - self._data = await self._handler.mark_fixed(**params) + self._reset(await self._handler.mark_fixed(**params)) return self async def release( - self, *, comment: str=None, erase: bool=None, - secure_erase: bool=None, quick_erase: bool=None, - wait: bool=False, wait_interval: int=5): + self, + *, + comment: str = None, + erase: bool = None, + secure_erase: bool = None, + quick_erase: bool = None, + wait: bool = False, + wait_interval: int = 5 + ): """ Release the machine. 
@@ -640,24 +660,24 @@ async def release( :param wait_interval: How often to poll, defaults to 5 seconds. :type wait_interval: `int` """ - params = remove_None({ - "system_id": self.system_id, - "comment": comment, - "erase": erase, - "secure_erase": secure_erase, - "quick_erase": quick_erase, - }) - self._data = await self._handler.release(**params) + params = remove_None( + { + "system_id": self.system_id, + "comment": comment, + "erase": erase, + "secure_erase": secure_erase, + "quick_erase": quick_erase, + } + ) + self._reset(await self._handler.release(**params)) if not wait: return self else: # Wait for machine to be released - while self.status in [ - NodeStatus.RELEASING, NodeStatus.DISK_ERASING]: + while self.status in [NodeStatus.RELEASING, NodeStatus.DISK_ERASING]: await asyncio.sleep(wait_interval) try: - self._data = await self._handler.read( - system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) except CallError as error: if error.status == HTTPStatus.NOT_FOUND: # Release must have been on a machine in a pod. This @@ -667,19 +687,16 @@ async def release( else: raise if self.status == NodeStatus.FAILED_RELEASING: - msg = "{hostname} failed to be released.".format( - hostname=self.hostname - ) + msg = "{hostname} failed to be released.".format(hostname=self.hostname) raise FailedReleasing(msg, self) elif self.status == NodeStatus.FAILED_DISK_ERASING: - msg = "{hostname} failed to erase disk.".format( - hostname=self.hostname - ) + msg = "{hostname} failed to erase disk.".format(hostname=self.hostname) raise FailedDiskErasing(msg, self) return self async def power_on( - self, comment: str=None, wait: bool=False, wait_interval: int=5): + self, comment: str = None, wait: bool = False, wait_interval: int = 5 + ): """ Power on. 
@@ -694,7 +711,7 @@ async def power_on( if comment is not None: params["comment"] = comment try: - self._data = await self._handler.power_on(**params) + self._reset(await self._handler.power_on(**params)) except CallError as error: if error.status == HTTPStatus.FORBIDDEN: message = "Not allowed to power on machine." @@ -709,17 +726,19 @@ async def power_on( # Wait for machine to be powered on. while self.power_state == PowerState.OFF: await asyncio.sleep(wait_interval) - self._data = await self._handler.read(system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) if self.power_state == PowerState.ERROR: - msg = "{hostname} failed to power on.".format( - hostname=self.hostname - ) + msg = "{hostname} failed to power on.".format(hostname=self.hostname) raise PowerError(msg, self) return self async def power_off( - self, stop_mode: PowerStopMode=PowerStopMode.HARD, - comment: str=None, wait: bool=False, wait_interval: int=5): + self, + stop_mode: PowerStopMode = PowerStopMode.HARD, + comment: str = None, + wait: bool = False, + wait_interval: int = 5, + ): """ Power off. @@ -732,11 +751,11 @@ async def power_off( :param wait_interval: How often to poll, defaults to 5 seconds. :type wait_interval: `int` """ - params = {"system_id": self.system_id, 'stop_mode': stop_mode.value} + params = {"system_id": self.system_id, "stop_mode": stop_mode.value} if comment is not None: params["comment"] = comment try: - self._data = await self._handler.power_off(**params) + self._reset(await self._handler.power_off(**params)) except CallError as error: if error.status == HTTPStatus.FORBIDDEN: message = "Not allowed to power off machine." @@ -751,11 +770,9 @@ async def power_off( # Wait for machine to be powered off. 
while self.power_state == PowerState.ON: await asyncio.sleep(wait_interval) - self._data = await self._handler.read(system_id=self.system_id) + self._reset(await self._handler.read(system_id=self.system_id)) if self.power_state == PowerState.ERROR: - msg = "{hostname} failed to power off.".format( - hostname=self.hostname - ) + msg = "{hostname} failed to power off.".format(hostname=self.hostname) raise PowerError(msg, self) return self @@ -766,31 +783,59 @@ async def query_power_state(self): :returns: Current power state. :rtype: `PowerState` """ - power_data = await self._handler.query_power_state( - system_id=self.system_id) + power_data = await self._handler.query_power_state(system_id=self.system_id) # Update the internal state of this object as well, since we have the # updated power state from the BMC directly. MAAS server does this as # well, just do it client side to make it nice for a developer. - self._data['power_state'] = power_data['state'] - return PowerState(power_data['state']) + self._data["power_state"] = power_data["state"] + return PowerState(power_data["state"]) async def restore_default_configuration(self): """ Restore machine's configuration to its initial state. """ - self._data = await self._handler.restore_default_configuration( - system_id=self.system_id) + self._reset( + await self._handler.restore_default_configuration(system_id=self.system_id) + ) async def restore_networking_configuration(self): """ Restore machine's networking configuration to its initial state. """ - self._data = await self._handler.restore_networking_configuration( - system_id=self.system_id) + self._reset( + await self._handler.restore_networking_configuration( + system_id=self.system_id + ) + ) async def restore_storage_configuration(self): """ Restore machine's storage configuration to its initial state. 
""" - self._data = await self._handler.restore_storage_configuration( - system_id=self.system_id) + self._reset( + await self._handler.restore_storage_configuration(system_id=self.system_id) + ) + + async def lock(self, *, comment: str = None): + """Lock the machine to prevent changes. + + :param comment: Reason machine was locked. + :type comment: `str` + """ + params = {"system_id": self.system_id} + if comment: + params["comment"] = comment + self._reset(await self._handler.lock(**params)) + return self + + async def unlock(self, *, comment: str = None): + """Unlock the machine allowing changes. + + :param comment: Reason machine was unlocked. + :type comment: `str` + """ + params = {"system_id": self.system_id} + if comment: + params["comment"] = comment + self._reset(await self._handler.unlock(**params)) + return self diff --git a/maas/client/viscera/nodes.py b/maas/client/viscera/nodes.py index 2ba0dde4..3b181471 100644 --- a/maas/client/viscera/nodes.py +++ b/maas/client/viscera/nodes.py @@ -1,11 +1,14 @@ """Objects for nodes.""" -__all__ = [ - "Node", - "Nodes", -] +__all__ = ["Node", "Nodes"] + +try: + # Python <= 3.9 + from collections import Sequence +except: + # Python > 3.9 + from collections.abc import Sequence -from collections import Sequence import typing from . import ( @@ -18,13 +21,25 @@ ObjectType, to, ) -from ..enum import NodeType +from ..enum import NodeType, PowerState + + +def normalize_hostname(hostname): + """Strips the FQDN from the hostname, since hostname is unique in MAAS.""" + if hostname: + return hostname.split(".", 1)[0] + return hostname + + +def map_tag_name_to_dict(instance, value): + """Convert a tag name into a dictionary for Tag.""" + return {"name": value, "__incomplete__": True} class NodesType(ObjectType): """Metaclass for `Nodes`.""" - async def read(cls, *, hostnames: typing.Sequence[str]=None): + async def read(cls, *, hostnames: typing.Sequence[str] = None): """List nodes. 
:param hostnames: Sequence of hostnames to only return. @@ -32,7 +47,9 @@ async def read(cls, *, hostnames: typing.Sequence[str]=None): """ params = {} if hostnames: - params["hostname"] = hostnames + params["hostname"] = [ + normalize_hostname(hostname) for hostname in hostnames + ] data = await cls._handler.read(**params) return cls(map(cls._object, data)) @@ -52,27 +69,26 @@ async def read(cls, system_id): class Node(Object, metaclass=NodeTypeMeta): """A node stored in MAAS.""" - # domain - - fqdn = ObjectField.Checked( - "fqdn", check(str), readonly=True) - hostname = ObjectField.Checked( - "hostname", check(str), check(str)) + domain = ObjectFieldRelated("domain", "Domain") + fqdn = ObjectField.Checked("fqdn", check(str), readonly=True) + hostname = ObjectField.Checked("hostname", check(str), check(str)) interfaces = ObjectFieldRelatedSet("interface_set", "Interfaces") ip_addresses = ObjectField.Checked( # List[str] - "ip_addresses", check(Sequence), readonly=True) - node_type = ObjectField.Checked( - "node_type", to(NodeType), readonly=True) + "ip_addresses", check(Sequence), readonly=True + ) + node_type = ObjectField.Checked("node_type", to(NodeType), readonly=True) owner = ObjectFieldRelated("owner", "User") - system_id = ObjectField.Checked( - "system_id", check(str), readonly=True, pk=True) - tags = ObjectField.Checked( # List[str] - "tag_names", check(Sequence), readonly=True) + power_state = ObjectField.Checked("power_state", to(PowerState), readonly=True) + power_type = ObjectField.Checked("power_type", check(str)) + pool = ObjectFieldRelated("pool", "ResourcePool", use_data_setter=True) + system_id = ObjectField.Checked("system_id", check(str), readonly=True, pk=True) + tags = ObjectFieldRelatedSet( + "tag_names", "Tags", reverse=None, map_func=map_tag_name_to_dict + ) zone = ObjectFieldRelated("zone", "Zone") def __repr__(self): - return super(Node, self).__repr__( - fields={"system_id", "hostname"}) + return super(Node, 
self).__repr__(fields={"system_id", "hostname"}) def as_machine(self): """Convert to a `Machine` object. @@ -80,8 +96,7 @@ def as_machine(self): `node_type` must be `NodeType.MACHINE`. """ if self.node_type != NodeType.MACHINE: - raise ValueError( - 'Cannot convert to `Machine`, node_type is not a machine.') + raise ValueError("Cannot convert to `Machine`, node_type is not a machine.") return self._origin.Machine(self._data) def as_device(self): @@ -90,8 +105,7 @@ def as_device(self): `node_type` must be `NodeType.DEVICE`. """ if self.node_type != NodeType.DEVICE: - raise ValueError( - 'Cannot convert to `Device`, node_type is not a device.') + raise ValueError("Cannot convert to `Device`, node_type is not a device.") return self._origin.Device(self._data) def as_rack_controller(self): @@ -101,10 +115,13 @@ def as_rack_controller(self): `NodeType.REGION_AND_RACK_CONTROLLER`. """ if self.node_type not in [ - NodeType.RACK_CONTROLLER, NodeType.REGION_AND_RACK_CONTROLLER]: + NodeType.RACK_CONTROLLER, + NodeType.REGION_AND_RACK_CONTROLLER, + ]: raise ValueError( - 'Cannot convert to `RackController`, node_type is not a ' - 'rack controller.') + "Cannot convert to `RackController`, node_type is not a " + "rack controller." + ) return self._origin.RackController(self._data) def as_region_controller(self): @@ -114,9 +131,39 @@ def as_region_controller(self): `NodeType.REGION_AND_RACK_CONTROLLER`. """ if self.node_type not in [ - NodeType.REGION_CONTROLLER, - NodeType.REGION_AND_RACK_CONTROLLER]: + NodeType.REGION_CONTROLLER, + NodeType.REGION_AND_RACK_CONTROLLER, + ]: raise ValueError( - 'Cannot convert to `RegionController`, node_type is not a ' - 'region controller.') + "Cannot convert to `RegionController`, node_type is not a " + "region controller." 
+ ) return self._origin.RegionController(self._data) + + async def get_power_parameters(self): + """Get the power paramters for this node.""" + data = await self._handler.power_parameters(system_id=self.system_id) + return data + + async def set_power( + self, power_type: str, power_parameters: typing.Mapping[str, typing.Any] = {} + ): + """Set the power type and power parameters for this node.""" + data = await self._handler.update( + system_id=self.system_id, + power_type=power_type, + power_parameters=power_parameters, + ) + self.power_type = data["power_type"] + + async def save(self): + # the resource pool uses the name in the API, not the id. The field is + # defined with use_data_setter=True, so the value in self._changed_data + # is the full data dict, not just the id. + if "pool" in self._changed_data: + self._changed_data["pool"] = self._changed_data["pool"]["name"] + return await super().save() + + async def delete(self): + """Deletes the node from MAAS.""" + await self._handler.delete(system_id=self.system_id) diff --git a/maas/client/viscera/partitions.py b/maas/client/viscera/partitions.py new file mode 100644 index 00000000..c29e250a --- /dev/null +++ b/maas/client/viscera/partitions.py @@ -0,0 +1,161 @@ +"""Objects for partitions.""" + +__all__ = ["Partition", "Partitions"] + +from . 
import check, Object, ObjectField, ObjectFieldRelated, ObjectSet, ObjectType +from .nodes import Node +from .block_devices import BlockDevice + + +def map_device_id_to_dict(instance, value): + """Convert a device_id into a dictionary for BlockDevice.""" + return { + "system_id": instance._data["system_id"], + "id": value, + "__incomplete__": True, + } + + +class PartitionType(ObjectType): + """Metaclass for `Partition`.""" + + async def read(cls, node, block_device, id): + """Get `Partition` by `id`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + if isinstance(block_device, int): + block_device = block_device + elif isinstance(block_device, BlockDevice): + block_device = block_device.id + else: + raise TypeError( + "node must be a Node or str, not %s" % type(block_device).__name__ + ) + return cls( + await cls._handler.read(system_id=system_id, device_id=block_device, id=id) + ) + + +class Partition(Object, metaclass=PartitionType): + """A partition on a block device.""" + + block_device = ObjectFieldRelated( + "device_id", "BlockDevice", readonly=True, pk=0, map_func=map_device_id_to_dict + ) + id = ObjectField.Checked("id", check(int), readonly=True, pk=1) + uuid = ObjectField.Checked("uuid", check(str), readonly=True) + path = ObjectField.Checked("path", check(str), readonly=True) + size = ObjectField.Checked("size", check(int), readonly=True) + used_for = ObjectField.Checked("used_for", check(str), readonly=True) + + filesystem = ObjectFieldRelated("filesystem", "Filesystem", readonly=True) + + def __repr__(self): + return super(Partition, self).__repr__(fields={"path", "size"}) + + async def delete(self): + """Delete this partition.""" + await self._handler.delete( + system_id=self.block_device.node.system_id, + device_id=self.block_device.id, + id=self.id, + ) + + async def format(self, fstype, *, uuid=None): + 
"""Format this partition.""" + self._reset( + await self._handler.format( + system_id=self.block_device.node.system_id, + device_id=self.block_device.id, + id=self.id, + fstype=fstype, + uuid=uuid, + ) + ) + + async def unformat(self): + """Unformat this partition.""" + self._reset( + await self._handler.unformat( + system_id=self.block_device.node.system_id, + device_id=self.block_device.id, + id=self.id, + ) + ) + + async def mount(self, mount_point, *, mount_options=None): + """Mount this partition.""" + self._reset( + await self._handler.mount( + system_id=self.block_device.node.system_id, + device_id=self.block_device.id, + id=self.id, + mount_point=mount_point, + mount_options=mount_options, + ) + ) + + async def umount(self): + """Unmount this partition.""" + self._reset( + await self._handler.unmount( + system_id=self.block_device.node.system_id, + device_id=self.block_device.id, + id=self.id, + ) + ) + + +class PartitionsType(ObjectType): + """Metaclass for `Partitions`.""" + + async def read(cls, node, block_device): + """Get list of `Partitions`'s for `node` and `block_device`.""" + if isinstance(node, str): + system_id = node + elif isinstance(node, Node): + system_id = node.system_id + else: + raise TypeError("node must be a Node or str, not %s" % type(node).__name__) + if isinstance(block_device, int): + block_device = block_device + elif isinstance(block_device, BlockDevice): + block_device = block_device.id + else: + raise TypeError( + "node must be a Node or str, not %s" % type(block_device).__name__ + ) + data = await cls._handler.read(system_id=system_id, device_id=block_device) + return cls(cls._object(item) for item in data) + + async def create(cls, block_device: BlockDevice, size: int): + """ + Create a partition on a block device. + + :param block_device: BlockDevice to create the paritition on. + :type block_device: `BlockDevice` + :param size: The size of the partition in bytes. 
+ :type size: `int` + """ + params = {} + if isinstance(block_device, BlockDevice): + params["system_id"] = block_device.node.system_id + params["device_id"] = block_device.id + else: + raise TypeError( + "block_device must be a BlockDevice, not %s" + % (type(block_device).__name__) + ) + + if not size: + raise ValueError("size must be provided and greater than zero.") + params["size"] = size + return cls._object(await cls._handler.create(**params)) + + +class Partitions(ObjectSet, metaclass=PartitionsType): + """The set of partitions on a block device.""" diff --git a/maas/client/viscera/pods.py b/maas/client/viscera/pods.py new file mode 100644 index 00000000..c2c0cdb9 --- /dev/null +++ b/maas/client/viscera/pods.py @@ -0,0 +1,235 @@ +"""Objects for pods.""" + +__all__ = ["Pod", "Pods"] + +import typing +from . import check, Object, ObjectField, ObjectFieldRelated, ObjectSet, ObjectType +from .zones import Zone +from ..utils import remove_None +from ..errors import OperationNotAllowed + + +class PodsType(ObjectType): + """Metaclass for `Pods`.""" + + async def read(cls): + data = await cls._handler.read() + return cls(map(cls._object, data)) + + async def create( + cls, + *, + type: str, + power_address: str, + power_user: str = None, + power_pass: str = None, + name: str = None, + zone: typing.Union[str, Zone] = None, + tags: typing.Sequence[str] = None + ): + """Create a `Pod` in MAAS. + + :param type: Type of pod to create (rsd, virsh) (required). + :type name: `str` + :param power_address: Address for power control of the pod (required). + :type power_address: `str` + :param power_user: User for power control of the pod + (required for rsd). + :type power_user: `str` + :param power_pass: Password for power control of the pod + (required for rsd). + :type power_pass: `str` + :param name: Name for the pod (optional). + :type name: `str` + :param zone: Name of the zone for the pod (optional). 
+ :type zone: `str` or `Zone` + :param tags: A tag or tags (separated by comma) for the pod. + :type tags: `str` + :returns: The created Pod. + :rtype: `Pod` + """ + params = remove_None( + { + "type": type, + "power_address": power_address, + "power_user": power_user, + "power_pass": power_pass, + "name": name, + "tags": tags, + } + ) + if type == "rsd" and power_user is None: + message = "'power_user' is required for pod type `rsd`" + raise OperationNotAllowed(message) + if type == "rsd" and power_pass is None: + message = "'power_pass' is required for pod type `rsd`" + raise OperationNotAllowed(message) + if zone is not None: + if isinstance(zone, Zone): + params["zone"] = zone.name + elif isinstance(zone, str): + params["zone"] = zone + else: + raise TypeError( + "zone must be a str or Zone, not %s" % type(zone).__name__ + ) + return cls._object(await cls._handler.create(**params)) + + +class Pods(ObjectSet, metaclass=PodsType): + """The set of `Pods` stored in MAAS.""" + + +class PodType(ObjectType): + """Metaclass for a `Pod`.""" + + async def read(cls, id: int): + """Get `Pod` by `id`.""" + data = await cls._handler.read(id=id) + return cls(data) + + +class Pod(Object, metaclass=PodType): + """A `Pod` stored in MAAS.""" + + id = ObjectField.Checked("id", check(int), readonly=True, pk=True) + type = ObjectField.Checked("type", check(str), check(str)) + name = ObjectField.Checked("name", check(str), check(str)) + architectures = ObjectField.Checked("architectures", check(list), check(list)) + capabilities = ObjectField.Checked("capabilities", check(list), check(list)) + zone = ObjectFieldRelated("zone", "Zone", readonly=True) + tags = ObjectField.Checked("tags", check(list), check(list)) + cpu_over_commit_ratio = ObjectField.Checked( + "cpu_over_commit_ratio", check(float), check(float) + ) + memory_over_commit_ratio = ObjectField.Checked( + "memory_over_commit_ratio", check(float), check(float) + ) + available = ObjectField.Checked("available", check(dict), 
check(dict)) + used = ObjectField.Checked("used", check(dict), check(dict)) + total = ObjectField.Checked("total", check(dict), check(dict)) + default_macvlan_mode = ObjectField.Checked( + "default_macvlan_mode", check(str), check(str) + ) + host = ObjectFieldRelated("host", "Node", readonly=True) + + async def save(self): + """Save the `Pod`.""" + old_tags = list(self._orig_data["tags"]) + new_tags = list(self.tags) + self._changed_data.pop("tags", None) + await super(Pod, self).save() + for tag_name in new_tags: + if tag_name not in old_tags: + await self._handler.add_tag(id=self.id, tag=tag_name) + else: + old_tags.remove(tag_name) + for tag_name in old_tags: + await self._handler.remove_tag(id=self.id, tag=tag_name) + self._orig_data["tags"] = new_tags + self._data["tags"] = list(new_tags) + + async def refresh(self): + """Refresh the `Pod`.""" + return await self._handler.refresh(id=self.id) + + async def parameters(self): + """Get the power parameters for the `Pod`.""" + return await self._handler.parameters(id=self.id) + + async def compose( + self, + *, + cores: int = None, + memory: int = None, + cpu_speed: int = None, + architecture: str = None, + storage: typing.Sequence[str] = None, + hostname: str = None, + domain: typing.Union[int, str] = None, + zone: typing.Union[int, str, Zone] = None, + interfaces: typing.Sequence[str] = None + ): + """Compose a machine from `Pod`. + + All fields below are optional: + + :param cores: Minimum number of CPU cores. + :type cores: `int` + :param memory: Minimum amount of memory (MiB). + :type memory: `int` + :param cpu_speed: Minimum amount of CPU speed (MHz). + :type cpu_speed: `int` + :param architecture: Architecture for the machine. Must be an + architecture that the pod supports. + :type architecture: `str` + :param storage: A list of storage constraint identifiers, in the form: +