Compare commits
No commits in common. "sifi" and "main" have entirely different histories.
|
|
@ -55,8 +55,6 @@ fact_caching_timeout=86400
|
|||
# (pathspec) Colon-separated paths in which Ansible will search for collections content. Collections must be in nested *subdirectories*, not directly in these directories. For example, if ``COLLECTIONS_PATHS`` includes ``'{{ ANSIBLE_HOME ~ "/collections" }}'``, and you want to add ``my.collection`` to that directory, it must be saved as ``'{{ ANSIBLE_HOME} ~ "/collections/ansible_collections/my/collection" }}'``.
|
||||
|
||||
;collections_path=/Users/fabioisti/.ansible/collections:/usr/share/ansible/collections
|
||||
collections_path=./playbooks/collections
|
||||
|
||||
|
||||
# (boolean) A boolean to enable or disable scanning the sys.path for installed collections.
|
||||
;collections_scan_sys_path=True
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
ansible_user: ansible
|
||||
#ansible_password: "{{ ansible_crypted_password }}"
|
||||
ansible_password: "{{ ansible_crypted_password }}"
|
||||
ansible_python_interpreter: /usr/bin/python3
|
||||
|
|
@ -1,9 +1,7 @@
|
|||
$ANSIBLE_VAULT;1.1;AES256
|
||||
36323030396532326432653332633031386532393564653565623231343037376432626337353666
|
||||
3136663234656635613337653036356366363431346263630a313034633838663263663662653639
|
||||
64333761373334633465363632646366656430626362623630343830383735663830303462623630
|
||||
6465323036343965640a393536313637623930613431643962613237363733653163613366643837
|
||||
64616462626165396632353365666334363035393864386534363831643631646530663739323538
|
||||
61646635343264393737336666653330383863623362663166306632653939376463653362363431
|
||||
33366334306330643266303730653863633363303964316361626665363262343833323063343932
|
||||
31313938626338326431
|
||||
37333066623836633836613066346434626134336537663236396639346235386362336637376534
|
||||
3833636230313835326663306236333837343337393530390a636464393562346662613838343738
|
||||
39356439343862633937313539323661303866316164343830363431626435396636386366376263
|
||||
6536393735363663650a383461666230633838303436643837636562343366313235393264666462
|
||||
38643366653861666364363538333230656539663134646566666664626463343433613166393337
|
||||
3432333863646664336262353262333635323436326430376465
|
||||
|
|
|
|||
|
|
@ -1,119 +0,0 @@
|
|||
bind_allow_query:
|
||||
- "any"
|
||||
|
||||
bind_listen:
|
||||
ipv4:
|
||||
- port: 53
|
||||
addresses:
|
||||
- "127.0.0.1"
|
||||
- "{{ ansible_default_ipv4.address }}"
|
||||
- port: 5353
|
||||
addresses:
|
||||
- "127.0.1.1"
|
||||
ipv6:
|
||||
- port: 53
|
||||
addresses:
|
||||
- "{{ ansible_default_ipv4.address }}"
|
||||
|
||||
|
||||
bind_zones:
|
||||
- name: 'sifi.isti.cnr.it'
|
||||
# default: primary [primary, secondary, forward]
|
||||
# type: primary
|
||||
# create_forward_zones: true
|
||||
# Skip creation of reverse zones
|
||||
# create_reverse_zones: false
|
||||
# fpr type: secondary
|
||||
primaries:
|
||||
- 146.48.108.51
|
||||
networks:
|
||||
- '146.48.108'
|
||||
#ipv6_networks:
|
||||
# - '2001:db9::/48'
|
||||
name_servers:
|
||||
- ns1.sifi.isti.cnr.it.
|
||||
# hostmaster_email: admin
|
||||
#
|
||||
#allow_updates:
|
||||
# - "10.0.1.2"
|
||||
# - 'key "external-dns"'
|
||||
#allow_transfers:
|
||||
# - 'key "external-dns"'
|
||||
hosts:
|
||||
- name: ns1
|
||||
ip: 146.48.108.51
|
||||
- name: bigbrain
|
||||
ip: 146.48.108.14
|
||||
#ipv6: '2001:db9::1'
|
||||
#mail_servers:
|
||||
# - name: mail001
|
||||
# preference: 10
|
||||
|
||||
|
||||
bind_logging:
|
||||
enable: true
|
||||
channels:
|
||||
- channel: general
|
||||
file: "data/general.log"
|
||||
versions: 3
|
||||
size: 10M
|
||||
print_time: true # true | false
|
||||
print_category: true
|
||||
print_severity: true
|
||||
severity: dynamic # critical | error | warning | notice | info | debug [level] | dynamic
|
||||
- channel: query
|
||||
file: "data/query.log"
|
||||
versions: 5
|
||||
size: 10M
|
||||
print_time: "" # true | false
|
||||
severity: info #
|
||||
- channel: dnssec
|
||||
file: "data/dnssec.log"
|
||||
versions: 5
|
||||
size: 10M
|
||||
print_time: "" # true | false
|
||||
severity: info #
|
||||
- channel: notify
|
||||
file: "data/notify.log"
|
||||
versions: 5
|
||||
size: 10M
|
||||
print_time: "" # true | false
|
||||
severity: info #
|
||||
- channel: transfers
|
||||
file: "data/transfers.log"
|
||||
versions: 5
|
||||
size: 10M
|
||||
print_time: "" # true | false
|
||||
severity: info #
|
||||
- channel: slog
|
||||
syslog: security # kern | user | mail | daemon | auth | syslog | lpr |
|
||||
# news | uucp | cron | authpriv | ftp |
|
||||
# local0 | local1 | local2 | local3 |
|
||||
# local4 | local5 | local6 | local7
|
||||
# file: "data/transfers.log"
|
||||
#versions: 5
|
||||
#size: 10M
|
||||
print_time: "" # true | false
|
||||
severity: info #
|
||||
categories:
|
||||
"xfer-out":
|
||||
- transfers
|
||||
- slog
|
||||
"xfer-in":
|
||||
- transfers
|
||||
- slog
|
||||
notify:
|
||||
- notify
|
||||
"lame-servers":
|
||||
- general
|
||||
config:
|
||||
- general
|
||||
default:
|
||||
- general
|
||||
security:
|
||||
- general
|
||||
- slog
|
||||
dnssec:
|
||||
- dnssec
|
||||
queries:
|
||||
- query
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
#SIFI
|
||||
sifi:
|
||||
children:
|
||||
opn:
|
||||
hosts:
|
||||
ns1.sifi.isti.cnr.it:
|
||||
ansible_host: 146.48.108.51
|
||||
nameserver:
|
||||
hosts:
|
||||
opn:
|
||||
workers:
|
||||
hosts:
|
||||
worker1.sifi.isti.cnr.it:
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
- hosts: all
|
||||
become: yes
|
||||
#debugger: on_failed
|
||||
debugger: on_failed
|
||||
tasks:
|
||||
- name: Add the ansible group
|
||||
group:
|
||||
|
|
@ -18,7 +18,7 @@
|
|||
password: "{{ ansible_crypted_password | password_hash('sha512') }}"
|
||||
shell: /bin/bash
|
||||
# Uncomment to prevent password reset
|
||||
update_password: on_create
|
||||
# update_password: on_create
|
||||
system: yes
|
||||
home: /srv/ansible
|
||||
state: present
|
||||
|
|
@ -31,27 +31,6 @@
|
|||
group: root
|
||||
mode: 0440
|
||||
|
||||
|
||||
- name: Init cache directory
|
||||
ansible.builtin.file:
|
||||
path: /var/cache/ansible
|
||||
owner: ansible
|
||||
group: ansible
|
||||
state: directory
|
||||
mode: u=rwx,g=rw,o=r
|
||||
|
||||
- name: Init etc directory
|
||||
ansible.builtin.file:
|
||||
path: /etc/ansible
|
||||
owner: ansible
|
||||
group: ansible
|
||||
state: directory
|
||||
mode: u=rwx,g=rw,o=r
|
||||
|
||||
|
||||
# Inserts public keys of allowed externals users to log in as ansible
|
||||
# e.g. fabio
|
||||
|
||||
- name: Create the .ssh directory
|
||||
file: path=/srv/ansible/.ssh owner=ansible group=ansible mode=0700 state=directory
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +0,0 @@
|
|||
download_url: https://galaxy.ansible.com/api/v3/plugin/ansible/content/published/collections/artifacts/bodsch-core-2.10.1.tar.gz
|
||||
format_version: 1.0.0
|
||||
name: core
|
||||
namespace: bodsch
|
||||
server: https://galaxy.ansible.com/api/
|
||||
signatures: []
|
||||
version: 2.10.1
|
||||
version_url: /api/v3/plugin/ansible/content/published/collections/index/bodsch/core/versions/2.10.1/
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
download_url: https://galaxy.ansible.com/api/v3/plugin/ansible/content/published/collections/artifacts/bodsch-dns-1.4.0.tar.gz
|
||||
format_version: 1.0.0
|
||||
name: dns
|
||||
namespace: bodsch
|
||||
server: https://galaxy.ansible.com/api/
|
||||
signatures: []
|
||||
version: 1.4.0
|
||||
version_url: /api/v3/plugin/ansible/content/published/collections/index/bodsch/dns/versions/1.4.0/
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
download_url: https://galaxy.ansible.com/api/v3/plugin/ansible/content/published/collections/artifacts/bodsch-systemd-1.4.0.tar.gz
|
||||
format_version: 1.0.0
|
||||
name: systemd
|
||||
namespace: bodsch
|
||||
server: https://galaxy.ansible.com/api/
|
||||
signatures: []
|
||||
version: 1.4.0
|
||||
version_url: /api/v3/plugin/ansible/content/published/collections/index/bodsch/systemd/versions/1.4.0/
|
||||
|
|
@ -1,31 +0,0 @@
|
|||
Contributing
|
||||
============
|
||||
If you want to contribute to a project and make it better, your help is very welcome.
|
||||
Contributing is also a great way to learn more about social coding on Github, new technologies and
|
||||
and their ecosystems and how to make constructive, helpful bug reports, feature requests and the
|
||||
noblest of all contributions: a good, clean pull request.
|
||||
|
||||
### How to make a clean pull request
|
||||
|
||||
Look for a project's contribution instructions. If there are any, follow them.
|
||||
|
||||
- Create a personal fork of the project on Github.
|
||||
- Clone the fork on your local machine. Your remote repo on Github is called `origin`.
|
||||
- Add the original repository as a remote called `upstream`.
|
||||
- If you created your fork a while ago be sure to pull upstream changes into your local repository.
|
||||
- Create a new branch to work on! Branch from `develop` if it exists, else from `master`.
|
||||
- Implement/fix your feature, comment your code.
|
||||
- Follow the code style of the project, including indentation.
|
||||
- If the project has tests run them!
|
||||
- Write or adapt tests as needed.
|
||||
- Add or change the documentation as needed.
|
||||
- Squash your commits into a single commit. Create a new branch if necessary.
|
||||
- Push your branch to your fork on Github, the remote `origin`.
|
||||
- From your fork open a pull request in the correct branch. Target the project's `develop` branch if there is one, else go for `master`!
|
||||
- If the maintainer requests further changes just push them to your branch. The PR will be updated automatically.
|
||||
- Once the pull request is approved and merged you can pull the changes from `upstream` to your local repo and delete
|
||||
your extra branch(es).
|
||||
|
||||
And last but not least: Always write your commit messages in the present tense.
|
||||
Your commit message should describe what the commit, when applied, does to the
|
||||
code – not what you did to the code.
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,201 +0,0 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
{
|
||||
"collection_info": {
|
||||
"namespace": "bodsch",
|
||||
"name": "core",
|
||||
"version": "2.10.1",
|
||||
"authors": [
|
||||
"Bodo Schulz <bodo@boone-schulz.de>"
|
||||
],
|
||||
"readme": "README.md",
|
||||
"tags": [
|
||||
"pki",
|
||||
"vpn",
|
||||
"openvpn",
|
||||
"easyrsa",
|
||||
"certificate",
|
||||
"security",
|
||||
"automation"
|
||||
],
|
||||
"description": "collection of core modules for my ansible roles",
|
||||
"license": [
|
||||
"Apache-2.0"
|
||||
],
|
||||
"license_file": null,
|
||||
"dependencies": {
|
||||
"ansible.utils": "*",
|
||||
"ansible.posix": "*",
|
||||
"community.general": ">=10.5"
|
||||
},
|
||||
"repository": "https://github.com/bodsch/ansible-collection-core",
|
||||
"documentation": "https://github.com/bodsch/ansible-collection-core/README.md",
|
||||
"homepage": "https://github.com/bodsch/ansible-collection-core",
|
||||
"issues": "https://github.com/bodsch/ansible-collection-core/issues"
|
||||
},
|
||||
"file_manifest_file": {
|
||||
"name": "FILES.json",
|
||||
"ftype": "file",
|
||||
"chksum_type": "sha256",
|
||||
"chksum_sha256": "bf021256b84411724fe68c0611287919125acc1d3ea4ebc1fdef0fad58e10ced",
|
||||
"format": 1
|
||||
},
|
||||
"format": 1
|
||||
}
|
||||
|
|
@ -1,420 +0,0 @@
|
|||
# Ansible Collection - bodsch.core
|
||||
|
||||
Documentation for the collection.
|
||||
|
||||
This collection aims to offer an set of ansible modules or helper functions.
|
||||
|
||||
## supported Operating systems
|
||||
|
||||
Tested on
|
||||
|
||||
* ArchLinux
|
||||
* Debian based
|
||||
- Debian 10 / 11 / 12 / 13
|
||||
- Ubuntu 20.04 / 22.04 / 24.04
|
||||
|
||||
> **RedHat-based systems are no longer officially supported! May work, but does not have to.**
|
||||
|
||||
|
||||
## Requirements & Dependencies
|
||||
|
||||
- `dnspython`
|
||||
- `dirsync`
|
||||
- `netaddr`
|
||||
|
||||
```bash
|
||||
pip install dnspython
|
||||
pip install dirsync
|
||||
pip install netaddr
|
||||
```
|
||||
|
||||
## Included content
|
||||
|
||||
|
||||
### Roles
|
||||
|
||||
| Role | Build State | Description |
|
||||
|:---------------------------------------------------------------------------| :---------: | :---- |
|
||||
| [bodsch.core.pacman](./roles/pacman/README.md) | [][pacman] | Ansible role to configure pacman. |
|
||||
| [bodsch.core.fail2ban](./roles/fail2ban/README.md) | [][fail2ban] | Installs and configure fail2ban |
|
||||
| [bodsch.core.syslog_ng](./roles/syslog_ng/README.md) | [][syslog_ng] | Installs and configures a classic syslog-ng service for processing log files away from journald. |
|
||||
| [bodsch.core.logrotate](./roles/logrotate/README.md) | [][logrotate] | Installs logrotate and provides an easy way to setup additional logrotate scripts |
|
||||
| [bodsch.core.mount](./roles/mount/README.md) | [][mount] | Manage generic mountpoints |
|
||||
| [bodsch.core.openvpn](./roles/openvpn/README.md) | [][openvpn] | Ansible role to install and configure openvpn server. |
|
||||
| [bodsch.core.sysctl](./roles/sysctl/README.md) | [][sysctl] | Ansible role to configure sysctl. |
|
||||
| [bodsch.core.sshd](./roles/sshd/README.md) | [][sshd] | Ansible role to configure sshd. |
|
||||
|
||||
[pacman]: https://github.com/bodsch/ansible-collection-core/actions/workflows/pacman.yml
|
||||
[fail2ban]: https://github.com/bodsch/ansible-collection-core/actions/workflows/fail2ban.yml
|
||||
[snakeoil]: https://github.com/bodsch/ansible-collection-core/actions/workflows/snakeoil.yml
|
||||
[syslog_ng]: https://github.com/bodsch/ansible-collection-core/actions/workflows/syslog_ng.yml
|
||||
[logrotate]: https://github.com/bodsch/ansible-collection-core/actions/workflows/logrotate.yml
|
||||
[mount]: https://github.com/bodsch/ansible-collection-core/actions/workflows/mount.yml
|
||||
[openvpn]: https://github.com/bodsch/ansible-collection-core/actions/workflows/openvpn.yml
|
||||
[sysctl]: https://github.com/bodsch/ansible-collection-core/actions/workflows/sysctl.yml
|
||||
[sshd]: https://github.com/bodsch/ansible-collection-core/actions/workflows/sshd.yml
|
||||
|
||||
### Modules
|
||||
|
||||
| Name | Description |
|
||||
|:--------------------------|:----|
|
||||
| [bodsch.core.aur](./plugins/modules/aur.py) | Installing packages for ArchLinux with aur |
|
||||
| [bodsch.core.check_mode](./plugins/modules/check_mode.py) | Replacement for `ansible_check_mode`. |
|
||||
| [bodsch.core.facts](./plugins/modules/facts.py) | Creates a facts file for ansible. |
|
||||
| [bodsch.core.remove_ansible_backups](./plugins/modules/remove_ansible_backups.py) | Remove older backup files created by ansible |
|
||||
| [bodsch.core.package_version](./plugins/modules/package_version.py) | Attempts to determine the version of a package to be installed or already installed. |
|
||||
| [bodsch.core.sync_directory](./plugins/modules/sync_directory.py) | Syncronises directories similar to rsync |
|
||||
| [bodsch.core.easyrsa](.plugins/modules/easyrsa.py) | Manage a Public Key Infrastructure (PKI) using EasyRSA. |
|
||||
| [bodsch.core.openvpn_client_certificate](.plugins/modules/openvpn_client_certificate.py) | Manage OpenVPN client certificates using EasyRSA. |
|
||||
| [bodsch.core.openvpn_crl](.plugins/modules/openvpn_crl.py) | |
|
||||
| [bodsch.core.openvpn_ovpn](.plugins/modules/openvpn_ovpn.py) | |
|
||||
| [bodsch.core.openvpn](.plugins/modules/openvpn.py) | |
|
||||
| [bodsch.core.openvpn_version](.plugins/modules/openvpn_version.py) | |
|
||||
| [bodsch.core.pip_requirements](.plugins/modules/pip_requirements.py) | This modules creates an requirement file to install python modules via pip. |
|
||||
| [bodsch.core.syslog_cmd](.plugins/modules/syslog_cmd.py) | Run syslog-ng with arbitrary command-line parameters |
|
||||
| [bodsch.core.apt_sources](.plugins/modules/apt_sources.py) | Manage APT deb822 (.sources) repositories with repo-specific keyrings. |
|
||||
|
||||
|
||||
### Module utils
|
||||
|
||||
| Name | Description |
|
||||
|:--------------------------|:----|
|
||||
| [bodsch.core.passlib_bcrypt5_compat](./plugins/module_utils/passlib_bcrypt5_compat.py) | Compatibility helpers for using `passlib` 1.7.4 with `bcrypt` 5.x |
|
||||
|
||||
|
||||
### Actions
|
||||
|
||||
| Name | Description |
|
||||
|:--------------------------|:----|
|
||||
| [bodsch.core.deploy_and_activate](./plugins/sction/deploy_and_activate.py) | Controller-side orchestration for deploying versioned binaries and activating them via symlinks. |
|
||||
|
||||
|
||||
## Installing this collection
|
||||
|
||||
You can install the memsource collection with the Ansible Galaxy CLI:
|
||||
|
||||
```bash
|
||||
#> ansible-galaxy collection install bodsch.core
|
||||
```
|
||||
|
||||
To install directly from GitHub:
|
||||
|
||||
```bash
|
||||
#> ansible-galaxy collection install git@github.com:bodsch/ansible-collection-core.git
|
||||
```
|
||||
|
||||
|
||||
You can also include it in a `requirements.yml` file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format:
|
||||
|
||||
```yaml
|
||||
---
|
||||
collections:
|
||||
- name: bodsch.core
|
||||
# version: ">=2.8.x"
|
||||
```
|
||||
|
||||
The python module dependencies are not installed by `ansible-galaxy`. They can
|
||||
be manually installed using pip:
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
## Using this collection
|
||||
|
||||
|
||||
You can either call modules by their Fully Qualified Collection Name (FQCN), such as `bodsch.core.remove_ansible_backups`,
|
||||
or you can call modules by their short name if you list the `bodsch.core` collection in the playbook's `collections` keyword:
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
### `bodsch.core.aur`
|
||||
|
||||
```yaml
|
||||
- name: install collabora package via aur
|
||||
become: true
|
||||
become_user: aur_builder
|
||||
bodsch.core.aur:
|
||||
state: present
|
||||
name: collabora-online-server
|
||||
repository: "{{ collabora_arch.source_repository }}"
|
||||
async: 3200
|
||||
poll: 10
|
||||
register: _collabora_installed
|
||||
```
|
||||
|
||||
### `bodsch.core.check_mode`
|
||||
|
||||
```yaml
|
||||
- name: detect ansible check_mode
|
||||
bodsch.core.check_mode:
|
||||
register: _check_mode
|
||||
|
||||
- name: define check_mode
|
||||
ansible.builtin.set_fact:
|
||||
check_mode: '{{ _check_mode.check_mode }}'
|
||||
```
|
||||
|
||||
### `bodsch.core.deploy_and_activate`
|
||||
|
||||
```yaml
|
||||
- name: deploy and activate logstream_exporter version {{ logstream_exporter_version }}
|
||||
bodsch.core.deploy_and_activate:
|
||||
src_dir: "{{ logstream_exporter_local_tmp_directory }}"
|
||||
install_dir: "{{ logstream_exporter_install_path }}"
|
||||
link_dir: "/usr/bin"
|
||||
remote_src: false # "{{ 'true' if logstream_exporter_direct_download else 'false' }}"
|
||||
owner: "{{ logstream_exporter_system_user }}"
|
||||
group: "{{ logstream_exporter_system_group }}"
|
||||
mode: "0755"
|
||||
items:
|
||||
- name: "{{ logstream_exporter_release.binary }}"
|
||||
capability: "cap_net_raw+ep"
|
||||
notify:
|
||||
- restart logstream exporter
|
||||
```
|
||||
|
||||
### `bodsch.core.easyrsa`
|
||||
|
||||
```yaml
|
||||
- name: initialize easy-rsa - (this is going to take a long time)
|
||||
bodsch.core.easyrsa:
|
||||
pki_dir: '{{ openvpn_easyrsa.directory }}/pki'
|
||||
req_cn_ca: "{{ openvpn_certificate.req_cn_ca }}"
|
||||
req_cn_server: '{{ openvpn_certificate.req_cn_server }}'
|
||||
ca_keysize: 4096
|
||||
dh_keysize: "{{ openvpn_diffie_hellman_keysize }}"
|
||||
working_dir: '{{ openvpn_easyrsa.directory }}'
|
||||
force: true
|
||||
register: _easyrsa_result
|
||||
```
|
||||
|
||||
### `bodsch.core.facts`
|
||||
|
||||
```yaml
|
||||
- name: create custom facts
|
||||
bodsch.core.facts:
|
||||
state: present
|
||||
name: icinga2
|
||||
facts:
|
||||
version: "2.10"
|
||||
salt: fgmklsdfnjyxnvjksdfbkuser
|
||||
user: icinga2
|
||||
```
|
||||
|
||||
### `bodsch.core.openvpn_client_certificate`
|
||||
|
||||
```yaml
|
||||
- name: create or revoke client certificate
|
||||
bodsch.core.openvpn_client_certificate:
|
||||
clients:
|
||||
- name: molecule
|
||||
state: present
|
||||
roadrunner: false
|
||||
static_ip: 10.8.3.100
|
||||
remote: server
|
||||
port: 1194
|
||||
proto: udp
|
||||
device: tun
|
||||
ping: 20
|
||||
ping_restart: 45
|
||||
cert: molecule.crt
|
||||
key: molecule.key
|
||||
tls_auth:
|
||||
enabled: true
|
||||
- name: roadrunner_one
|
||||
state: present
|
||||
roadrunner: true
|
||||
static_ip: 10.8.3.10
|
||||
remote: server
|
||||
port: 1194
|
||||
proto: udp
|
||||
device: tun
|
||||
ping: 20
|
||||
ping_restart: 45
|
||||
cert: roadrunner_one.crt
|
||||
key: roadrunner_one.key
|
||||
tls_auth:
|
||||
enabled: true
|
||||
working_dir: /etc/easy-rsa
|
||||
```
|
||||
|
||||
### `bodsch.core.openvpn_crl`
|
||||
|
||||
```yaml
|
||||
- name: Check CRL status and include revoked certificates
|
||||
bodsch.core.openvpn_crl:
|
||||
state: status
|
||||
pki_dir: /etc/easy-rsa/pki
|
||||
list_revoked_certificates: true
|
||||
|
||||
- name: Warn if CRL expires within 14 days
|
||||
bodsch.core.openvpn_crl:
|
||||
state: status
|
||||
pki_dir: /etc/easy-rsa/pki
|
||||
warn_for_expire: true
|
||||
expire_in_days: 14
|
||||
register: crl_status
|
||||
|
||||
- name: Regenerate (renew) CRL using Easy-RSA
|
||||
bodsch.core.openvpn_crl:
|
||||
state: renew
|
||||
pki_dir: /etc/easy-rsa/pki
|
||||
working_dir: /etc/easy-rsa
|
||||
register: crl_renew
|
||||
```
|
||||
|
||||
### `bodsch.core.openvpn_ovpn`
|
||||
|
||||
```yaml
|
||||
- name: Force recreation of an existing .ovpn file
|
||||
bodsch.core.openvpn_ovpn:
|
||||
state: present
|
||||
username: carol
|
||||
destination_directory: /etc/openvpn/clients
|
||||
force: true
|
||||
```
|
||||
|
||||
### `bodsch.core.openvpn_version`
|
||||
|
||||
```yaml
|
||||
- name: Print parsed version
|
||||
ansible.builtin.debug:
|
||||
msg: "OpenVPN version: {{ openvpn.version }}"
|
||||
```
|
||||
|
||||
### `bodsch.core.openvpn`
|
||||
|
||||
```yaml
|
||||
- name: Generate tls-auth key (ta.key)
|
||||
bodsch.core.openvpn:
|
||||
state: genkey
|
||||
secret: /etc/openvpn/ta.key
|
||||
|
||||
- name: Generate tls-auth key only if marker does not exist
|
||||
bodsch.core.openvpn:
|
||||
state: genkey
|
||||
secret: /etc/openvpn/ta.key
|
||||
creates: /var/lib/openvpn/ta.key.created
|
||||
|
||||
- name: Force regeneration by removing marker first
|
||||
bodsch.core.openvpn:
|
||||
state: genkey
|
||||
secret: /etc/openvpn/ta.key
|
||||
creates: /var/lib/openvpn/ta.key.created
|
||||
force: true
|
||||
|
||||
- name: Create Easy-RSA client and write inline .ovpn
|
||||
bodsch.core.openvpn:
|
||||
state: create_user
|
||||
secret: /dev/null # required by module interface, not used here
|
||||
username: alice
|
||||
destination_directory: /etc/openvpn/clients
|
||||
chdir: /etc/easy-rsa
|
||||
|
||||
- name: Create user only if marker does not exist
|
||||
bodsch.core.openvpn:
|
||||
state: create_user
|
||||
secret: /dev/null
|
||||
username: bob
|
||||
destination_directory: /etc/openvpn/clients
|
||||
chdir: /etc/easy-rsa
|
||||
creates: /var/lib/openvpn/clients/bob.created
|
||||
```
|
||||
|
||||
### `bodsch.core.package_version`
|
||||
|
||||
```yaml
|
||||
- name: get version of available package
|
||||
bodsch.core.package_version:
|
||||
package_name: nano
|
||||
register: package_version
|
||||
```
|
||||
|
||||
### `bodsch.core.pip_requirements`
|
||||
|
||||
```yaml
|
||||
- name: create pip requirements file
|
||||
bodsch.core.pip_requirements:
|
||||
name: docker
|
||||
state: present
|
||||
requirements:
|
||||
- name: docker
|
||||
compare_direction: "=="
|
||||
version: 6.0.0
|
||||
|
||||
- name: setuptools
|
||||
version: 39.1.0
|
||||
|
||||
- name: requests
|
||||
versions:
|
||||
- ">= 2.28.0"
|
||||
- "< 2.30.0"
|
||||
- "!~ 1.1.0"
|
||||
register: pip_requirements
|
||||
```
|
||||
|
||||
### `bodsch.core.remove_ansible_backups`
|
||||
|
||||
```yaml
|
||||
---
|
||||
- name: remove older ansible backup files
|
||||
bodsch.core.remove_ansible_backups:
|
||||
path: /etc
|
||||
holds: 4
|
||||
```
|
||||
|
||||
### `bodsch.core.sync_directory`
|
||||
|
||||
```yaml
|
||||
- name: synchronize config for first run
|
||||
bodsch.core.sync_directory:
|
||||
source_directory: "{{ nextcloud_install_base_directory }}/nextcloud/{{ nextcloud_version }}/config_DIST"
|
||||
destination_directory: "{{ nextcloud_install_base_directory }}/nextcloud/config"
|
||||
arguments:
|
||||
verbose: true
|
||||
purge: false
|
||||
```
|
||||
|
||||
### `bodsch.core.syslog_cmd`
|
||||
|
||||
```yaml
|
||||
- name: detect config version
|
||||
bodsch.core.syslog_cmd:
|
||||
parameters:
|
||||
- --version
|
||||
when:
|
||||
- not running_in_check_mode
|
||||
register: _syslog_config_version
|
||||
|
||||
- name: validate syslog-ng config
|
||||
bodsch.core.syslog_cmd:
|
||||
parameters:
|
||||
- --syntax-only
|
||||
check_mode: true
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
## Contribution
|
||||
|
||||
Please read [Contribution](CONTRIBUTING.md)
|
||||
|
||||
## Development, Branches (Git Tags)
|
||||
|
||||
The `master` branch is my *workhorse*; it includes the "latest, hot shit" and can be completely broken!
|
||||
|
||||
If you want to use something stable, please use a [Tagged Version](https://github.com/bodsch/ansible-collection-core/tags)!
|
||||
|
||||
|
||||
## Author
|
||||
|
||||
- Bodo Schulz
|
||||
|
||||
## License
|
||||
|
||||
[Apache](LICENSE)
|
||||
|
||||
**FREE SOFTWARE, HELL YEAH!**
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
---
|
||||
|
||||
requires_ansible: '>=2.12'
|
||||
|
||||
platforms:
|
||||
- name: ArchLinux
|
||||
- name: Debian
|
||||
versions:
|
||||
- bullseye
|
||||
- bookworm
|
||||
- trixie
|
||||
- name: Ubuntu
|
||||
versions:
|
||||
# 20.04
|
||||
- focal
|
||||
# 22.04
|
||||
- jammy
|
||||
# 24.04
|
||||
- noble
|
||||
# 26.04
|
||||
# - resolute
|
||||
|
||||
python_versions:
|
||||
- "3.10"
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
- "3.13"
|
||||
|
|
@ -1,162 +0,0 @@
|
|||
# Collections Plugins Directory
|
||||
|
||||
## modules
|
||||
|
||||
### remove_ansible_backups
|
||||
|
||||
```shell
|
||||
ansible-doc --type module bodsch.core.remove_ansible_backups
|
||||
> BODSCH.CORE.REMOVE_ANSIBLE_BACKUPS (./collections/ansible_collections/bodsch/core/plugins/modules/remove_ansible_backups.py)
|
||||
|
||||
Remove older backup files created by ansible
|
||||
```
|
||||
|
||||
### package_version
|
||||
|
||||
```shell
|
||||
ansible-doc --type module bodsch.core.package_version
|
||||
> BODSCH.CORE.PACKAGE_VERSION (./collections/ansible_collections/bodsch/core/plugins/modules/package_version.py)
|
||||
|
||||
Attempts to determine the version of a package to be installed or already installed. Supports apt, pacman, dnf (or yum) as
|
||||
package manager.
|
||||
```
|
||||
|
||||
### aur
|
||||
|
||||
```shell
|
||||
ansible-doc --type module bodsch.core.aur
|
||||
> BODSCH.CORE.AUR (./collections/ansible_collections/bodsch/core/plugins/modules/aur.py)
|
||||
|
||||
This module manages packages for ArchLinux on a target with aur (like [ansible.builtin.yum], [ansible.builtin.apt], ...).
|
||||
```
|
||||
|
||||
### journalctl
|
||||
|
||||
```shell
|
||||
> BODSCH.CORE.JOURNALCTL (./collections/ansible_collections/bodsch/core/plugins/modules/journalctl.py)
|
||||
|
||||
Query the systemd journal with a very limited number of possible parameters. In certain cases there are errors that are not
|
||||
clearly traceable but are logged in the journal. This module is intended to be a tool for error analysis.
|
||||
```
|
||||
|
||||
### facts
|
||||
|
||||
```shell
|
||||
|
||||
> BODSCH.CORE.FACTS (./collections/ansible_collections/bodsch/core/plugins/modules/facts.py)
|
||||
|
||||
Write Ansible Facts
|
||||
```
|
||||
|
||||
## module_utils
|
||||
|
||||
### `checksum`
|
||||
|
||||
```python
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.checksum import Checksum
|
||||
|
||||
c = Checksum()
|
||||
|
||||
print(c.checksum("fooo"))
|
||||
print(c.checksum_from_file("/etc/fstab"))
|
||||
|
||||
# ???
|
||||
c.compare("aaa", "bbb")
|
||||
c.save("test-check", "aaa")
|
||||
c.load("test-check")
|
||||
```
|
||||
|
||||
### `file`
|
||||
|
||||
```python
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.file import remove_file, create_link
|
||||
```
|
||||
|
||||
- `create_link(source, destination, force=False)`
|
||||
- `remove_file(file_name)`
|
||||
|
||||
### `directory`
|
||||
|
||||
```python
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.directory import create_directory
|
||||
```
|
||||
|
||||
- `create_directory(directory)`
|
||||
- `permstr_to_octal(modestr, umask)`
|
||||
- `current_state(directory)`
|
||||
- `fix_ownership(directory, force_owner=None, force_group=None, force_mode=False)`
|
||||
|
||||
|
||||
### `cache`
|
||||
|
||||
```python
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.cache.cache_valid import cache_valid
|
||||
```
|
||||
|
||||
- `cache_valid(module, cache_file_name, cache_minutes=60, cache_file_remove=True)`
|
||||
|
||||
### `template`
|
||||
|
||||
## lookup
|
||||
|
||||
### `file_glob`
|
||||
|
||||
## filter
|
||||
|
||||
### `types`
|
||||
|
||||
- `type()`
|
||||
- `config_bool(data, true_as="yes", false_as="no")`
|
||||
|
||||
### `verify`
|
||||
|
||||
- `compare_list(data_list, compare_to_list)`
|
||||
- `upgrade(install_path, bin_path)`
|
||||
|
||||
### `dns`
|
||||
|
||||
- `dns_lookup(timeout=3, extern_resolver=[])`
|
||||
|
||||
### `python`
|
||||
|
||||
- `python_extra_args(python_version=ansible_python.version, extra_args=[], break_system_packages=True)`
|
||||
|
||||
### `union_by`
|
||||
|
||||
- `union(docker_defaults_python_packages, union_by='name')`
|
||||
|
||||
### - `parse_checksum`
|
||||
|
||||
- `parse_checksum('nginx-prometheus-exporter', ansible_facts.system, system_architecture)`
|
||||
|
||||
## misc
|
||||
|
||||
This directory can be used to ship various plugins inside an Ansible collection. Each plugin is placed in a folder that
|
||||
is named after the type of plugin it is in. It can also include the `module_utils` and `modules` directory that
|
||||
would contain module utils and modules respectively.
|
||||
|
||||
Here is an example directory of the majority of plugins currently supported by Ansible:
|
||||
|
||||
```
|
||||
└── plugins
|
||||
├── action
|
||||
├── become
|
||||
├── cache
|
||||
├── callback
|
||||
├── cliconf
|
||||
├── connection
|
||||
├── filter
|
||||
├── httpapi
|
||||
├── inventory
|
||||
├── lookup
|
||||
├── module_utils
|
||||
├── modules
|
||||
├── netconf
|
||||
├── shell
|
||||
├── strategy
|
||||
├── terminal
|
||||
├── test
|
||||
└── vars
|
||||
```
|
||||
|
||||
A full list of plugin types can be found at [Working With Plugins](https://docs.ansible.com/ansible-core/2.14/plugins/plugins.html).
|
||||
|
|
@ -1,430 +0,0 @@
|
|||
"""
|
||||
binary_deploy.py (action plugin)
|
||||
|
||||
Controller-aware wrapper that supports:
|
||||
- remote_src=true: src_dir is on the remote host -> use activate_version_remote to do everything in one remote call.
|
||||
- remote_src=false: src_dir is on the controller -> verify local files, create install_dir remotely, transfer files,
|
||||
then let activate_version_remote enforce caps and symlinks.
|
||||
|
||||
This collapses the common "stat + fail + stat + stat + copy + file + capabilities + link" pattern into one task.
|
||||
"""
|
||||
|
||||
from __future__ import annotations

import os
from typing import Any, Dict, List, Optional, Tuple

from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
|
||||
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class ActionModule(ActionBase):
|
||||
"""Deploy binaries to install_dir and activate them via symlinks."""
|
||||
|
||||
TRANSFERS_FILES = True
|
||||
|
||||
def _get_items(self, args: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Return the validated 'items' task argument.

    Raises AnsibleError when 'items' is missing, empty, or not a list.
    """
    display.v(f"ActionModule::_get_items(args: {args})")

    candidate = args.get("items") or []
    usable = isinstance(candidate, list) and bool(candidate)
    if not usable:
        raise AnsibleError("binary_deploy: 'items' must be a non-empty list")
    return candidate
|
||||
|
||||
def _local_item_path(self, src_dir: str, item: Dict[str, Any]) -> Tuple[str, str]:
    """Resolve one item to (local_src_path, dest_filename).

    The destination filename is always ``item['name']``; the source
    filename defaults to the same name unless ``item['src']`` overrides it.
    """
    display.v(f"ActionModule::_local_item_path(src_dir: {src_dir}, item: {item})")

    dest_name = str(item["name"])
    source_name = str(item.get("src") or dest_name)
    local_path = os.path.join(src_dir, source_name)
    return local_path, dest_name
|
||||
|
||||
def _ensure_local_files_exist(self, src_dir: str, items: List[Dict[str, Any]]) -> None:
    """Fail early when any controller-local source binary is missing."""
    display.v(f"ActionModule::_ensure_local_files_exist(src_dir: {src_dir}, items: {items})")

    for entry in items:
        local_src, _ = self._local_item_path(src_dir, entry)
        if os.path.isfile(local_src):
            continue
        raise AnsibleError(f"binary_deploy: missing extracted binary on controller: {local_src}")
|
||||
|
||||
def _probe_remote(
    self,
    *,
    tmp: Optional[str],
    task_vars: Dict[str, Any],
    module_args: Dict[str, Any],
) -> Dict[str, Any]:
    """Run the remote worker module and return its raw result dict.

    Used for both the check-only probe and the apply phase; the
    distinction is carried entirely inside ``module_args``.
    """
    display.v(f"ActionModule::_probe_remote(tmp: {tmp}, task_vars, module_args: {module_args})")

    outcome = self._execute_module(
        module_name="bodsch.core.activate_version_remote",
        module_args=module_args,
        task_vars=task_vars,
        tmp=tmp,
    )
    return outcome
|
||||
|
||||
def _remote_copy_from_controller(
    self,
    *,
    tmp: Optional[str],
    task_vars: Dict[str, Any],
    src_dir: str,
    install_dir: str,
    items: List[Dict[str, Any]],
    mode: str,
    owner: Optional[str],
    group: Optional[str],
) -> bool:
    """Transfer controller-local binaries into the remote install_dir.

    Each item is copied via ansible.builtin.copy with remote_src=False.
    Returns True when at least one copy reported a change.
    """
    display.v(f"ActionModule::_remote_copy_from_controller(tmp: {tmp}, task_vars, src_dir: {src_dir}, install_dir: {install_dir}, items: {items}, owner: {owner}, group: {group}, mode: {mode})")

    any_changed = False

    for entry in items:
        local_src, dest_name = self._local_item_path(src_dir, entry)

        copy_args: Dict[str, Any] = {
            "src": local_src,
            "dest": os.path.join(install_dir, dest_name),
            "remote_src": False,
            "mode": mode,
        }
        # owner/group are optional; only forward them when set so the
        # copy module falls back to its own defaults otherwise.
        if owner:
            copy_args["owner"] = owner
        if group:
            copy_args["group"] = group

        copy_result = self._execute_module(
            module_name="ansible.builtin.copy",
            module_args=copy_args,
            task_vars=task_vars,
            tmp=tmp,
        )
        if copy_result.get("changed", False):
            any_changed = True

    return any_changed
|
||||
|
||||
def run(self, tmp: str | None = None, task_vars: Dict[str, Any] | None = None) -> Dict[str, Any]:
    """Probe the remote host, then deploy and activate binaries if needed.

    Flow:
      1. Run the remote worker in check-only mode to decide whether
         anything must change.
      2. Short-circuit in Ansible check mode or when no update is needed.
      3. Apply: ensure install_dir exists, stage/copy the binaries, and
         let the remote worker enforce perms, capabilities and symlinks.

    On any apply-phase exception the (possibly partial) install_dir is
    removed when cleanup_on_failure is true, then the error is re-raised.
    """
    display.v(f"ActionModule::run(tmp: {tmp}, task_vars)")

    if task_vars is None:
        task_vars = {}

    # ActionBase.run() performs base plugin plumbing; its dict was bound
    # to an unused local before — the remote worker result is what this
    # action returns, so the base result is intentionally discarded.
    super().run(tmp, task_vars)
    args = self._task.args.copy()

    remote_src = bool(args.get("remote_src", False))
    install_dir = str(args["install_dir"])
    link_dir = str(args.get("link_dir", "/usr/bin"))
    src_dir = args.get("src_dir")
    mode = str(args.get("mode", "0755"))
    owner = args.get("owner")
    group = args.get("group")
    cleanup_on_failure = bool(args.get("cleanup_on_failure", True))
    activation_name = args.get("activation_name")

    items = self._get_items(args)

    display.v(f" - remote_src : {remote_src}")
    display.v(f" - install_dir : {install_dir}")
    display.v(f" - src_dir : {src_dir}")
    display.v(f" - link_dir : {link_dir}")
    display.v(f" - owner : {owner}")
    display.v(f" - group : {group}")
    display.v(f" - cleanup_on_failure : {cleanup_on_failure}")
    display.v(f" - activation_name : {activation_name}")

    # --- Probe ---
    probe_args: Dict[str, Any] = {
        "install_dir": install_dir,
        "link_dir": link_dir,
        "items": items,
        "activation_name": activation_name,
        "owner": owner,
        "group": group,
        "mode": mode,
        "cleanup_on_failure": cleanup_on_failure,
        "check_only": True,
        "copy": remote_src,
    }

    display.v(f" - probe_args : {probe_args}")

    # IMPORTANT: when remote_src=True (copy=True), src_dir must be passed and must be remote path
    if remote_src:
        if not src_dir:
            raise AnsibleError("binary_deploy: 'src_dir' is required when remote_src=true (remote path)")
        probe_args["src_dir"] = str(src_dir)

    probe = self._probe_remote(tmp=tmp, task_vars=task_vars, module_args=probe_args)

    display.v(f" - probe : {probe}")

    # Check mode: never change
    if bool(task_vars.get("ansible_check_mode", False)):
        probe["changed"] = False
        return probe

    if not probe.get("needs_update", False):
        probe["changed"] = False
        return probe

    # --- Apply ---
    try:
        # Ensure install_dir exists on remote
        dir_res = self._execute_module(
            module_name="ansible.builtin.file",
            module_args={"path": install_dir, "state": "directory"},
            task_vars=task_vars,
            tmp=tmp,
        )
        # BUGFIX: creating a missing install_dir is itself a change and
        # must be reflected in the reported status (the previous revision
        # merged this flag; this rewrite had dropped it).
        dir_changed = bool(dir_res.get("changed", False))

        if remote_src:
            # Remote -> Remote copy + perms/caps/links in one remote call
            apply_args = dict(probe_args)
            apply_args["check_only"] = False
            apply_args["copy"] = True
            apply_args["src_dir"] = str(src_dir)

            applied = self._probe_remote(tmp=tmp, task_vars=task_vars, module_args=apply_args)
            applied["changed"] = bool(applied.get("changed", False)) or dir_changed
            return applied

        # Controller -> Remote transfer
        if not src_dir:
            raise AnsibleError("binary_deploy: 'src_dir' is required when remote_src=false (controller path)")
        src_dir = str(src_dir)

        self._ensure_local_files_exist(src_dir, items)

        copied_any = self._remote_copy_from_controller(
            tmp=tmp,
            task_vars=task_vars,
            src_dir=src_dir,
            install_dir=install_dir,
            items=items,
            mode=mode,
            owner=owner,
            group=group,
        )

        # Enforce perms/caps/links in one remote call (no remote copy)
        apply_args = {
            "install_dir": install_dir,
            "link_dir": link_dir,
            "items": items,
            "activation_name": activation_name,
            "owner": owner,
            "group": group,
            "mode": mode,
            "cleanup_on_failure": cleanup_on_failure,
            "check_only": False,
            "copy": False,
        }
        applied = self._probe_remote(tmp=tmp, task_vars=task_vars, module_args=apply_args)
        applied["changed"] = bool(applied.get("changed", False)) or copied_any or dir_changed
        return applied

    except Exception:
        # Best-effort rollback: remove the partial install_dir, ignoring
        # secondary failures, then re-raise the original error.
        if cleanup_on_failure:
            try:
                self._execute_module(
                    module_name="ansible.builtin.file",
                    module_args={"path": install_dir, "state": "absent"},
                    task_vars=task_vars,
                    tmp=tmp,
                )
            except Exception:
                pass
        raise
|
||||
|
||||
|
||||
# -- FRIEDHOF ---
|
||||
|
||||
def run_OLD(
|
||||
self, tmp: str | None = None, task_vars: Dict[str, Any] | None = None
|
||||
) -> Dict[str, Any]:
|
||||
""" """
|
||||
display.v(f"ActionModule::run(tmp: {tmp}, task_vars: {task_vars})")
|
||||
|
||||
if task_vars is None:
|
||||
task_vars = {}
|
||||
|
||||
result: Dict[str, Any] = super().run(tmp, task_vars)
|
||||
args = self._task.args.copy()
|
||||
|
||||
remote_src = bool(args.pop("remote_src", False))
|
||||
install_dir = str(args["install_dir"])
|
||||
items: List[Dict[str, Any]] = args.get("items") or []
|
||||
if not items:
|
||||
raise AnsibleError("binary_deploy: items must not be empty")
|
||||
|
||||
src_dir = args.get("src_dir")
|
||||
link_dir = args.get("link_dir", "/usr/bin")
|
||||
owner = args.get("owner")
|
||||
group = args.get("group")
|
||||
mode = args.get("mode", "0755")
|
||||
cleanup_on_failure = bool(args.get("cleanup_on_failure", True))
|
||||
activation_name = args.get("activation_name")
|
||||
|
||||
display.v(f" - remote_src : {remote_src}")
|
||||
display.v(f" - install_dir : {install_dir}")
|
||||
display.v(f" - src_dir : {src_dir}")
|
||||
display.v(f" - link_dir : {link_dir}")
|
||||
display.v(f" - owner : {owner}")
|
||||
display.v(f" - group : {group}")
|
||||
display.v(f" - cleanup_on_failure : {cleanup_on_failure}")
|
||||
display.v(f" - activation_name : {activation_name}")
|
||||
|
||||
# 1) Check-only probe (remote): decide whether we need to do anything.
|
||||
probe_args: Dict[str, Any] = {
|
||||
"install_dir": install_dir,
|
||||
"link_dir": link_dir,
|
||||
"items": items,
|
||||
"activation_name": activation_name,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
"mode": mode,
|
||||
"cleanup_on_failure": cleanup_on_failure,
|
||||
"check_only": True,
|
||||
"copy": remote_src,
|
||||
}
|
||||
|
||||
display.v(f" - probe_args : {probe_args}")
|
||||
|
||||
if remote_src:
|
||||
if not src_dir:
|
||||
raise AnsibleError(
|
||||
"binary_deploy: src_dir is required when remote_src=true"
|
||||
)
|
||||
probe_args["src_dir"] = src_dir
|
||||
|
||||
probe = self._execute_module(
|
||||
module_name="bodsch.core.activate_version_remote",
|
||||
module_args=probe_args,
|
||||
task_vars=task_vars,
|
||||
tmp=tmp,
|
||||
)
|
||||
|
||||
# In check mode: return probe result as-is (no changes).
|
||||
if bool(task_vars.get("ansible_check_mode", False)):
|
||||
probe["changed"] = False
|
||||
return probe
|
||||
|
||||
if not probe.get("needs_update", False):
|
||||
probe["changed"] = False
|
||||
return probe
|
||||
|
||||
# 2) Apply
|
||||
try:
|
||||
if remote_src:
|
||||
apply_args = dict(probe_args)
|
||||
apply_args["check_only"] = False
|
||||
apply_args["copy"] = True
|
||||
apply_args["src_dir"] = src_dir
|
||||
|
||||
applied = self._execute_module(
|
||||
module_name="bodsch.core.activate_version_remote",
|
||||
module_args=apply_args,
|
||||
task_vars=task_vars,
|
||||
tmp=tmp,
|
||||
)
|
||||
return applied
|
||||
|
||||
# Controller-local source: verify local files exist first.
|
||||
if not src_dir:
|
||||
raise AnsibleError(
|
||||
"binary_deploy: src_dir is required when remote_src=false"
|
||||
)
|
||||
|
||||
for it in items:
|
||||
name = str(it["name"])
|
||||
src_name = str(it.get("src") or name)
|
||||
local_path = os.path.join(src_dir, src_name)
|
||||
if not os.path.isfile(local_path):
|
||||
raise AnsibleError(
|
||||
f"binary_deploy: missing extracted binary on controller: {local_path}"
|
||||
)
|
||||
|
||||
# Ensure install_dir exists remotely
|
||||
dir_res = self._execute_module(
|
||||
module_name="ansible.builtin.file",
|
||||
module_args={"path": install_dir, "state": "directory"},
|
||||
task_vars=task_vars,
|
||||
tmp=tmp,
|
||||
)
|
||||
|
||||
# Transfer binaries controller -> remote
|
||||
copied_any = False
|
||||
for it in items:
|
||||
name = str(it["name"])
|
||||
src_name = str(it.get("src") or name)
|
||||
|
||||
copy_res = self._execute_module(
|
||||
module_name="ansible.builtin.copy",
|
||||
module_args={
|
||||
"src": os.path.join(src_dir, src_name),
|
||||
"dest": os.path.join(install_dir, name),
|
||||
"mode": mode,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
"remote_src": False,
|
||||
},
|
||||
task_vars=task_vars,
|
||||
tmp=tmp,
|
||||
)
|
||||
copied_any = copied_any or bool(copy_res.get("changed", False))
|
||||
|
||||
# Enforce caps + symlinks (no remote copy; files already in install_dir)
|
||||
apply_args = {
|
||||
"install_dir": install_dir,
|
||||
"link_dir": link_dir,
|
||||
"items": items,
|
||||
"activation_name": activation_name,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
"mode": mode,
|
||||
"cleanup_on_failure": cleanup_on_failure,
|
||||
"check_only": False,
|
||||
"copy": False,
|
||||
}
|
||||
applied = self._execute_module(
|
||||
module_name="bodsch.core.activate_version_remote",
|
||||
module_args=apply_args,
|
||||
task_vars=task_vars,
|
||||
tmp=tmp,
|
||||
)
|
||||
|
||||
applied["changed"] = (
|
||||
bool(applied.get("changed", False))
|
||||
or bool(dir_res.get("changed", False))
|
||||
or copied_any
|
||||
)
|
||||
return applied
|
||||
|
||||
except Exception as exc:
|
||||
if cleanup_on_failure:
|
||||
try:
|
||||
self._execute_module(
|
||||
module_name="ansible.builtin.file",
|
||||
module_args={"path": install_dir, "state": "absent"},
|
||||
task_vars=task_vars,
|
||||
tmp=tmp,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
raise
|
||||
|
|
@ -1,513 +0,0 @@
|
|||
"""
|
||||
deploy_and_activate.py (action plugin)
|
||||
|
||||
Controller-side orchestration for deploying versioned binaries and activating them via symlinks.
|
||||
|
||||
This action plugin wraps a remote worker module (bodsch.core.deploy_and_activate_remote)
|
||||
and provides two operational modes:
|
||||
|
||||
1) remote_src=False (controller-local source):
|
||||
- Validate that extracted binaries exist on the Ansible controller in src_dir.
|
||||
- Stage these files onto the remote host via ActionBase._transfer_file().
|
||||
- Invoke the remote worker module to copy into install_dir and enforce perms/caps/symlinks.
|
||||
|
||||
2) remote_src=True (remote-local source):
|
||||
- Assume binaries already exist on the remote host in src_dir.
|
||||
- Invoke the remote worker module to copy into install_dir and enforce perms/caps/symlinks.
|
||||
|
||||
Implementation note:
|
||||
- Do not call ansible.builtin.copy via _execute_module() to transfer controller-local files.
|
||||
That bypasses the copy action logic and will not perform controller->remote transfer reliably.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
TypedDict,
|
||||
cast,
|
||||
)
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
REMOTE_WORKER_MODULE = "bodsch.core.deploy_and_activate_remote"
|
||||
|
||||
DOCUMENTATION = r"""
|
||||
---
|
||||
module: deploy_and_activate
|
||||
short_description: Deploy binaries into a versioned directory and activate them via symlinks (action plugin)
|
||||
description:
|
||||
- Controller-side action plugin that orchestrates a remote worker module.
|
||||
- Supports controller-local sources (C(remote_src=false)) via controller->remote staging.
|
||||
- Supports remote-local sources (C(remote_src=true)) where binaries already exist on the target host.
|
||||
options:
|
||||
install_dir:
|
||||
description:
|
||||
- Versioned installation directory on the target host.
|
||||
type: path
|
||||
required: true
|
||||
src_dir:
|
||||
description:
|
||||
- Directory containing extracted binaries.
|
||||
- For C(remote_src=false) this path is on the controller.
|
||||
- For C(remote_src=true) this path is on the target host.
|
||||
type: path
|
||||
required: true
|
||||
remote_src:
|
||||
description:
|
||||
- If true, C(src_dir) is on the remote host (remote->remote copy).
|
||||
- If false, C(src_dir) is on the controller (controller->remote staging).
|
||||
type: bool
|
||||
default: false
|
||||
link_dir:
|
||||
description:
|
||||
- Directory where activation symlinks are created on the target host.
|
||||
type: path
|
||||
default: /usr/bin
|
||||
items:
|
||||
description:
|
||||
- List of binaries to deploy.
|
||||
- Each item supports C(name) (required), optional C(src), optional C(link_name), optional C(capability).
|
||||
type: list
|
||||
elements: dict
|
||||
required: true
|
||||
activation_name:
|
||||
description:
|
||||
- Item name or link_name used to determine "activated" status (worker module feature).
|
||||
type: str
|
||||
required: false
|
||||
owner:
|
||||
description:
|
||||
- Owner name or uid for deployed binaries.
|
||||
type: str
|
||||
required: false
|
||||
group:
|
||||
description:
|
||||
- Group name or gid for deployed binaries.
|
||||
type: str
|
||||
required: false
|
||||
mode:
|
||||
description:
|
||||
- File mode for deployed binaries (octal string).
|
||||
type: str
|
||||
default: "0755"
|
||||
cleanup_on_failure:
|
||||
description:
|
||||
- Remove install_dir if an exception occurs during apply.
|
||||
type: bool
|
||||
default: true
|
||||
author:
|
||||
- "Bodsch Core Collection"
|
||||
notes:
|
||||
- This is an action plugin. It delegates actual deployment work to C(bodsch.core.deploy_and_activate_remote).
|
||||
"""
|
||||
|
||||
EXAMPLES = r"""
|
||||
- name: Deploy from controller cache (remote_src=false)
|
||||
bodsch.core.deploy_and_activate:
|
||||
remote_src: false
|
||||
src_dir: "/home/bodsch/.cache/ansible/logstream_exporter/1.0.0"
|
||||
install_dir: "/usr/local/opt/logstream_exporter/1.0.0"
|
||||
link_dir: "/usr/bin"
|
||||
owner: "logstream-exporter"
|
||||
group: "logstream-exporter"
|
||||
mode: "0755"
|
||||
items:
|
||||
- name: "logstream-exporter"
|
||||
capability: "cap_net_raw+ep"
|
||||
|
||||
- name: Deploy from remote extracted directory (remote_src=true)
|
||||
bodsch.core.deploy_and_activate:
|
||||
remote_src: true
|
||||
src_dir: "/var/cache/ansible/logstream_exporter/1.0.0"
|
||||
install_dir: "/usr/local/opt/logstream_exporter/1.0.0"
|
||||
items:
|
||||
- name: "logstream-exporter"
|
||||
"""
|
||||
|
||||
RETURN = r"""
|
||||
changed:
|
||||
description: Whether anything changed (as reported by the remote worker module).
|
||||
type: bool
|
||||
activated:
|
||||
description: Whether the activation symlink points into install_dir (worker module result).
|
||||
type: bool
|
||||
needs_update:
|
||||
description: Whether changes would be required (in probe/check mode output).
|
||||
type: bool
|
||||
plan:
|
||||
description: Per-item plan (in probe/check mode output).
|
||||
type: dict
|
||||
details:
|
||||
description: Per-item change details (in apply output).
|
||||
type: dict
|
||||
"""
|
||||
|
||||
|
||||
class ItemSpec(TypedDict, total=False):
    """Shape of a single 'items' entry handed to the remote worker module.

    All keys are optional at the type level (total=False); 'name' is
    enforced as required at runtime by _get_items().
    """

    name: str        # destination filename in install_dir (required at runtime)
    src: str         # source filename; defaults to 'name' when omitted
    link_name: str   # optional symlink name used in link_dir
    capability: str  # optional Linux file capability, e.g. "cap_net_raw+ep"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class _LocalItem:
|
||||
"""Normalized local item for controller-side existence checks and staging."""
|
||||
|
||||
name: str
|
||||
src_rel: str
|
||||
local_src: str
|
||||
|
||||
|
||||
class ActionModule(ActionBase):
|
||||
"""Deploy binaries to install_dir and activate them via symlinks."""
|
||||
|
||||
TRANSFERS_FILES = True
|
||||
|
||||
def _get_items(self, args: Mapping[str, Any]) -> List[ItemSpec]:
    """Check the user-supplied 'items' list and normalize each entry.

    Ensures a non-empty list of dicts, each with a non-blank 'name';
    returns shallow copies with 'name' stripped of surrounding whitespace.
    """
    display.vv(f"ActionModule::_get_items(args: {dict(args)})")

    raw_items = args.get("items") or []
    if not (isinstance(raw_items, list) and raw_items):
        raise AnsibleError("deploy_and_activate: 'items' must be a non-empty list")

    normalized_items: List[ItemSpec] = []
    for idx, entry in enumerate(raw_items):
        if not isinstance(entry, dict):
            raise AnsibleError(f"deploy_and_activate: items[{idx}] must be a dict")
        if "name" not in entry:
            raise AnsibleError(
                f"deploy_and_activate: items[{idx}] missing required key 'name'"
            )

        stripped_name = str(entry["name"]).strip()
        if not stripped_name:
            raise AnsibleError(
                f"deploy_and_activate: items[{idx}].name must not be empty"
            )

        # Copy before mutating so the caller's task args stay untouched.
        spec: ItemSpec = cast(ItemSpec, dict(entry))
        spec["name"] = stripped_name
        normalized_items.append(spec)

    return normalized_items
|
||||
|
||||
def _normalize_local_items(
|
||||
self, controller_src_dir: str, items: Sequence[ItemSpec]
|
||||
) -> List[_LocalItem]:
|
||||
"""Build controller-local absolute paths for each item."""
|
||||
display.vv(
|
||||
f"ActionModule::_normalize_local_items(controller_src_dir: {controller_src_dir}, items: {list(items)})"
|
||||
)
|
||||
|
||||
out: List[_LocalItem] = []
|
||||
for it in items:
|
||||
name = str(it["name"])
|
||||
src_rel = str(it.get("src") or name)
|
||||
local_src = os.path.join(controller_src_dir, src_rel)
|
||||
out.append(_LocalItem(name=name, src_rel=src_rel, local_src=local_src))
|
||||
return out
|
||||
|
||||
def _ensure_local_files_exist(
|
||||
self, controller_src_dir: str, items: Sequence[ItemSpec]
|
||||
) -> None:
|
||||
"""Fail early if any controller-local binary is missing."""
|
||||
display.vv(
|
||||
f"ActionModule::_ensure_local_files_exist(controller_src_dir: {controller_src_dir}, items: {list(items)})"
|
||||
)
|
||||
|
||||
for it in self._normalize_local_items(controller_src_dir, items):
|
||||
display.vv(f"= local_src: {it.local_src}, src_rel: {it.src_rel}")
|
||||
if not os.path.isfile(it.local_src):
|
||||
raise AnsibleError(
|
||||
f"deploy_and_activate: missing extracted binary on controller: {it.local_src}"
|
||||
)
|
||||
|
||||
def _probe_remote(
|
||||
self,
|
||||
*,
|
||||
tmp: Optional[str],
|
||||
task_vars: Mapping[str, Any],
|
||||
module_args: Dict[str, Any],
|
||||
) -> Dict[str, Any]:
|
||||
"""Execute the remote worker module and return its result."""
|
||||
display.vv(
|
||||
f"ActionModule::_probe_remote(tmp: {tmp}, task_vars, module_args: {module_args})"
|
||||
)
|
||||
|
||||
remote = self._execute_module(
|
||||
module_name=REMOTE_WORKER_MODULE,
|
||||
module_args=module_args,
|
||||
task_vars=dict(task_vars),
|
||||
tmp=tmp,
|
||||
)
|
||||
display.vv(f"= result: {remote}")
|
||||
return remote
|
||||
|
||||
def _ensure_remote_dir(
|
||||
self,
|
||||
*,
|
||||
tmp: Optional[str],
|
||||
task_vars: Mapping[str, Any],
|
||||
path: str,
|
||||
mode: str = "0700",
|
||||
) -> None:
|
||||
"""Ensure a directory exists on the remote host."""
|
||||
display.vv(
|
||||
f"ActionModule::_ensure_remote_dir(tmp: {tmp}, task_vars, path: {path}, mode: {mode})"
|
||||
)
|
||||
|
||||
self._execute_module(
|
||||
module_name="ansible.builtin.file",
|
||||
module_args={"path": path, "state": "directory", "mode": mode},
|
||||
task_vars=dict(task_vars),
|
||||
tmp=tmp,
|
||||
)
|
||||
|
||||
def _create_remote_temp_dir(
|
||||
self, *, tmp: Optional[str], task_vars: Mapping[str, Any]
|
||||
) -> str:
|
||||
"""
|
||||
Create a remote temporary directory.
|
||||
|
||||
This avoids using ActionBase._make_tmp_path(), which is not available in all Ansible versions.
|
||||
"""
|
||||
display.vv(f"ActionModule::_create_remote_temp_dir(tmp: {tmp}, task_vars)")
|
||||
|
||||
res = self._execute_module(
|
||||
module_name="ansible.builtin.tempfile",
|
||||
module_args={"state": "directory", "prefix": "deploy-and-activate-"},
|
||||
task_vars=dict(task_vars),
|
||||
tmp=tmp,
|
||||
)
|
||||
path = res.get("path")
|
||||
if not path:
|
||||
raise AnsibleError(
|
||||
"deploy_and_activate: failed to create remote temporary directory"
|
||||
)
|
||||
return str(path)
|
||||
|
||||
def _stage_files_to_remote(
|
||||
self,
|
||||
*,
|
||||
tmp: Optional[str],
|
||||
task_vars: Mapping[str, Any],
|
||||
controller_src_dir: str,
|
||||
items: Sequence[ItemSpec],
|
||||
) -> Tuple[str, bool]:
|
||||
"""
|
||||
Stage controller-local files onto the remote host via ActionBase._transfer_file().
|
||||
|
||||
Returns:
|
||||
Tuple(remote_stage_dir, created_by_us)
|
||||
"""
|
||||
normalized = self._normalize_local_items(controller_src_dir, items)
|
||||
|
||||
if tmp:
|
||||
remote_stage_dir = tmp
|
||||
created_by_us = False
|
||||
else:
|
||||
remote_stage_dir = self._create_remote_temp_dir(
|
||||
tmp=tmp, task_vars=task_vars
|
||||
)
|
||||
created_by_us = True
|
||||
|
||||
display.vv(
|
||||
f"ActionModule::_stage_files_to_remote(remote_stage_dir: {remote_stage_dir}, created_by_us: {created_by_us})"
|
||||
)
|
||||
|
||||
self._ensure_remote_dir(
|
||||
tmp=tmp, task_vars=task_vars, path=remote_stage_dir, mode="0700"
|
||||
)
|
||||
|
||||
# Create required subdirectories on remote if src_rel contains paths.
|
||||
needed_dirs: Set[str] = set()
|
||||
for it in normalized:
|
||||
rel_dir = os.path.dirname(it.src_rel)
|
||||
if rel_dir and rel_dir not in (".", "/"):
|
||||
needed_dirs.add(os.path.join(remote_stage_dir, rel_dir))
|
||||
|
||||
for d in sorted(needed_dirs):
|
||||
self._ensure_remote_dir(tmp=tmp, task_vars=task_vars, path=d, mode="0700")
|
||||
|
||||
# Transfer files.
|
||||
for it in normalized:
|
||||
remote_dst = os.path.join(remote_stage_dir, it.src_rel)
|
||||
display.vv(f"ActionModule::_transfer_file({it.local_src} -> {remote_dst})")
|
||||
self._transfer_file(it.local_src, remote_dst)
|
||||
|
||||
return remote_stage_dir, created_by_us
|
||||
|
||||
def run(
|
||||
self, tmp: str | None = None, task_vars: Dict[str, Any] | None = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Action plugin entrypoint.
|
||||
|
||||
Args:
|
||||
tmp: Remote tmp directory (may be None depending on Ansible execution path).
|
||||
task_vars: Task variables.
|
||||
|
||||
Returns:
|
||||
Result dict compatible with Ansible task output.
|
||||
"""
|
||||
display.vv(f"ActionModule::run(tmp: {tmp}, task_vars)")
|
||||
|
||||
if task_vars is None:
|
||||
task_vars = {}
|
||||
|
||||
display.vv(f" - task_vars : {task_vars}")
|
||||
|
||||
_ = super().run(tmp, task_vars)
|
||||
args: Dict[str, Any] = self._task.args.copy()
|
||||
|
||||
remote_src = bool(args.get("remote_src", False))
|
||||
install_dir = str(args["install_dir"])
|
||||
link_dir = str(args.get("link_dir", "/usr/bin"))
|
||||
src_dir = args.get("src_dir")
|
||||
mode = str(args.get("mode", "0755"))
|
||||
owner = args.get("owner")
|
||||
group = args.get("group")
|
||||
cleanup_on_failure = bool(args.get("cleanup_on_failure", True))
|
||||
activation_name = args.get("activation_name")
|
||||
|
||||
items = self._get_items(args)
|
||||
|
||||
display.vv(f" - args : {args}")
|
||||
|
||||
display.vv(f" - remote_src : {remote_src}")
|
||||
display.vv(f" - install_dir : {install_dir}")
|
||||
display.vv(f" - src_dir : {src_dir}")
|
||||
display.vv(f" - link_dir : {link_dir}")
|
||||
display.vv(f" - owner : {owner}")
|
||||
display.vv(f" - group : {group}")
|
||||
display.vv(f" - cleanup_on_failure : {cleanup_on_failure}")
|
||||
display.vv(f" - activation_name : {activation_name}")
|
||||
|
||||
# --- Probe (remote) ---
|
||||
probe_args: Dict[str, Any] = {
|
||||
"install_dir": install_dir,
|
||||
"link_dir": link_dir,
|
||||
"items": list(items),
|
||||
"activation_name": activation_name,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
"mode": mode,
|
||||
"cleanup_on_failure": cleanup_on_failure,
|
||||
"check_only": True,
|
||||
"copy": remote_src,
|
||||
}
|
||||
|
||||
if remote_src:
|
||||
if not src_dir:
|
||||
raise AnsibleError(
|
||||
"deploy_and_activate: 'src_dir' is required when remote_src=true (remote path)"
|
||||
)
|
||||
probe_args["src_dir"] = str(src_dir)
|
||||
|
||||
display.vv(f" - probe_args : {probe_args}")
|
||||
probe = self._probe_remote(tmp=tmp, task_vars=task_vars, module_args=probe_args)
|
||||
|
||||
# Check mode: never change.
|
||||
if bool(task_vars.get("ansible_check_mode", False)):
|
||||
probe["changed"] = False
|
||||
return probe
|
||||
|
||||
# Early exit if nothing to do.
|
||||
if not probe.get("needs_update", False):
|
||||
probe["changed"] = False
|
||||
return probe
|
||||
|
||||
# --- Apply ---
|
||||
stage_dir: Optional[str] = None
|
||||
stage_created_by_us = False
|
||||
|
||||
try:
|
||||
self._ensure_remote_dir(
|
||||
tmp=tmp, task_vars=task_vars, path=install_dir, mode="0755"
|
||||
)
|
||||
|
||||
if remote_src:
|
||||
apply_args = dict(probe_args)
|
||||
apply_args["check_only"] = False
|
||||
apply_args["copy"] = True
|
||||
apply_args["src_dir"] = str(src_dir)
|
||||
return self._probe_remote(
|
||||
tmp=tmp, task_vars=task_vars, module_args=apply_args
|
||||
)
|
||||
|
||||
# Controller -> Remote staging -> Remote apply(copy=True)
|
||||
if not src_dir:
|
||||
raise AnsibleError(
|
||||
"deploy_and_activate: 'src_dir' is required when remote_src=false (controller path)"
|
||||
)
|
||||
|
||||
controller_src_dir = str(src_dir)
|
||||
self._ensure_local_files_exist(controller_src_dir, items)
|
||||
|
||||
stage_dir, stage_created_by_us = self._stage_files_to_remote(
|
||||
tmp=tmp,
|
||||
task_vars=task_vars,
|
||||
controller_src_dir=controller_src_dir,
|
||||
items=items,
|
||||
)
|
||||
|
||||
apply_args = {
|
||||
"install_dir": install_dir,
|
||||
"link_dir": link_dir,
|
||||
"items": list(items),
|
||||
"activation_name": activation_name,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
"mode": mode,
|
||||
"cleanup_on_failure": cleanup_on_failure,
|
||||
"check_only": False,
|
||||
"copy": True,
|
||||
"src_dir": stage_dir,
|
||||
}
|
||||
return self._probe_remote(
|
||||
tmp=tmp, task_vars=task_vars, module_args=apply_args
|
||||
)
|
||||
|
||||
except Exception:
|
||||
if cleanup_on_failure:
|
||||
try:
|
||||
self._execute_module(
|
||||
module_name="ansible.builtin.file",
|
||||
module_args={"path": install_dir, "state": "absent"},
|
||||
task_vars=dict(task_vars),
|
||||
tmp=tmp,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
raise
|
||||
|
||||
finally:
|
||||
# Best-effort cleanup of the remote staging dir only if we created it.
|
||||
if stage_dir and stage_created_by_us:
|
||||
try:
|
||||
self._execute_module(
|
||||
module_name="ansible.builtin.file",
|
||||
module_args={"path": stage_dir, "state": "absent"},
|
||||
task_vars=dict(task_vars),
|
||||
tmp=tmp,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,60 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
"""
|
||||
Diese Funktion geht rekursiv durch die Struktur (ob Dictionary oder Liste) und entfernt alle Einträge,
|
||||
die entweder None, einen leeren String, ein leeres Dictionary, eine leere Liste enthalten.
|
||||
|
||||
Für Dictionaries wird jedes Schlüssel-Wert-Paar überprüft, und es wird nur gespeichert, wenn der Wert nicht leer ist.
|
||||
Für Listen werden nur nicht-leere Elemente in das Ergebnis aufgenommen.
|
||||
|
||||
Es wurde eine Hilfsfunktion `is_empty` eingeführt, die überprüft, ob ein Wert als "leer" betrachtet werden soll.
|
||||
|
||||
Diese Funktion berücksichtigt nun explizit, dass boolesche Werte (True und False) nicht als leer betrachtet werden, sondern erhalten bleiben.
|
||||
In der is_empty-Funktion wurde eine Überprüfung hinzugefügt, um sicherzustellen, dass die Zahl 0 nicht als leer betrachtet wird.
|
||||
Wenn der Wert 0 ist, wird er beibehalten.
|
||||
"""
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin exposing 'remove_empty_values'."""

    def filters(self):
        return {
            "remove_empty_values": self.remove_empty_values,
        }

    def remove_empty_values(self, data):
        """
        Recursively remove empty entries from a nested structure.

        Entries whose value is None, an empty string, an empty dict or an
        empty list are dropped. Booleans (True/False) and the number 0 are
        explicitly kept.

        Args:
            data: arbitrary nested dict/list structure.

        Returns:
            The cleaned structure; scalars are returned unchanged.
        """
        display.vv(f"bodsch.core.remove_empty_values(self, {data})")

        def is_empty(value):
            """Return True when value counts as empty (booleans and 0 are kept)."""
            if isinstance(value, bool):
                return False  # keep boolean values
            if value == 0:
                return False  # keep the number 0

            return value in [None, "", {}, [], False]

        if isinstance(data, dict):
            cleaned = {
                key: self.remove_empty_values(value)
                for key, value in data.items()
                if not is_empty(value)
            }
            # Re-filter after recursion so entries that *became* empty while
            # cleaning their children (e.g. {"a": {"b": None}}) are dropped
            # too, matching the documented recursive-removal behavior.
            return {k: v for k, v in cleaned.items() if not is_empty(v)}
        elif isinstance(data, list):
            cleaned_items = [
                self.remove_empty_values(item) for item in data if not is_empty(item)
            ]
            # Same post-recursion filtering for list elements.
            return [item for item in cleaned_items if not is_empty(item)]
        else:
            # Other types (including booleans) are returned unchanged.
            return data
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
# Documentation stubs for the 'clients_type' lookup/filter. These module-level
# strings are read by Ansible's documentation tooling (presumably ansible-doc;
# verify), so their contents must remain valid YAML — only surrounding
# comments are added here. TODO: replace the TBD placeholders with real docs.
DOCUMENTATION = """
name: clients_type
author: Bodo Schulz
version_added: "1.0.4"

short_description: TBD

description:
- TBD

options: {}

"""

EXAMPLES = """

"""

RETURN = """

"""
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from ansible.utils.display import Display
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.dns_lookup import dns_lookup
|
||||
|
||||
__metaclass__ = type
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin exposing 'dns_lookup'."""

    def filters(self):
        return {"dns_lookup": self.lookup}

    def lookup(self, dns_name, timeout=3, dns_resolvers=None):
        """
        use a simple DNS lookup, return results in a dictionary

        similar to
        {'addrs': [], 'error': True, 'error_msg': 'No such domain instance', 'name': 'instance'}

        Args:
            dns_name: host name to resolve.
            timeout: resolver timeout in seconds.
            dns_resolvers: list of resolver IPs; defaults to ['9.9.9.9'].

        Returns:
            Result dictionary from module_utils.dns_lookup.
        """
        # Fix: the previous signature used a mutable default argument
        # (dns_resolvers=["9.9.9.9"]); default to None and fall back here.
        if dns_resolvers is None:
            dns_resolvers = ["9.9.9.9"]

        display.vv(f"bodsch.core.dns_lookup({dns_name}, {timeout}, {dns_resolvers})")

        result = dns_lookup(dns_name, timeout, dns_resolvers)

        display.vv(f"= return : {result}")

        return result
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """
    Ansible file jinja2 tests
    """

    def filters(self):
        return {
            "merge_jails": self.merge_jails,
        }

    def __merge_two_dicts(self, x, y):
        """Return a new dict containing x's entries updated by y's."""
        z = x.copy()  # start with x's keys and values
        z.update(y)  # modifies z with y's keys and values & returns None
        return z

    def __search(self, d, name):
        """Return the first entry in d whose 'name' equals name, else None."""
        res = None
        for sub in d:
            if sub["name"] == name:
                res = sub
                break

        return res

    def __sort_list(self, _list, _filter):
        """Sort a list of dicts by the given key."""
        return sorted(_list, key=lambda k: k.get(_filter))

    def merge_jails(self, defaults, data):
        """
        Merge user-defined jail entries over the defaults.

        Entries present in both lists (matched by 'name') keep the user's
        version from `data`; the merged result is sorted by 'name'.

        Args:
            defaults: list of default jail dicts (each with a 'name' key).
            data: list of user-supplied jail dicts (each with a 'name' key).

        Returns:
            Sorted, merged list of jail dicts.
        """
        count_defaults = len(defaults)
        count_data = len(data)

        # short way
        if count_defaults == 0:
            return self.__sort_list(data, "name")

        if count_data == 0:
            return self.__sort_list(defaults, "name")

        # Work on a copy so the caller's defaults list is not mutated.
        defaults = list(defaults)

        # Drop every default that is overridden by a user entry of the same
        # name; the user's entry takes precedence.
        # (The previous implementation also appended None placeholders to an
        # intermediate list that was discarded by the reassignment below —
        # dead code, removed.)
        for d in data:
            _name = d["name"]
            if self.__search(defaults, _name):
                for i in range(len(defaults)):
                    if defaults[i]["name"] == _name:
                        del defaults[i]
                        break

        # add both lists and sort
        return self.__sort_list(data + defaults, "name")
|
||||
|
|
@ -1,88 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from typing import Any, Dict, Iterable, Optional, Tuple
|
||||
|
||||
from ansible.errors import AnsibleFilterError
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
"""
|
||||
Ansible filter plugin: host_id
|
||||
|
||||
Resolves a stable host identifier across Ansible versions and fact-injection styles.
|
||||
|
||||
Resolution order:
|
||||
1) ansible_facts['host'] (if present)
|
||||
2) ansible_facts['hostname'] (standard setup fact)
|
||||
3) inventory_hostname (always available as magic var)
|
||||
|
||||
Usage:
|
||||
{{ (ansible_facts | default({})) | host_id(inventory_hostname) }}
|
||||
"""
|
||||
|
||||
|
||||
class FilterModule:
    """Filter plugin resolving a stable host identifier."""

    def filters(self):
        return {
            "hostname": self.hostname,
        }

    def hostname(
        self,
        facts: Optional[Dict[str, Any]] = None,
        inventory_hostname: Optional[str] = None,
        prefer: Optional[Iterable[str]] = None,
        default: str = "",
    ) -> str:
        """
        Resolve a host identifier by trying fact keys in preference order,
        then falling back to the inventory hostname and finally the default.

        Args:
            facts: Typically 'ansible_facts' (may be undefined/None).
            inventory_hostname: Magic var 'inventory_hostname' as last-resort fallback.
            prefer: Iterable of fact keys to try in order (default: ('host', 'hostname')).
            default: Returned if nothing else is available.

        Returns:
            Resolved host identifier as string.
        """
        display.vv(
            f"bodsch.core.hostname(self, facts, inventory_hostname: '{inventory_hostname}', prefer: '{prefer}', default: '{default}')"
        )

        fact_map = self._as_dict(facts)
        lookup_order: Tuple[str, ...] = (
            ("host", "hostname") if prefer is None else tuple(prefer)
        )

        for candidate_key in lookup_order:
            candidate = fact_map.get(candidate_key)
            if candidate in (None, ""):
                continue
            display.vv(f"= result: {str(candidate)}")
            return str(candidate)

        if inventory_hostname not in (None, ""):
            display.vv(f"= result: {str(inventory_hostname)}")
            return str(inventory_hostname)

        display.vv(f"= result: {str(default)}")

        return str(default)

    def _as_dict(self, value: Any) -> Dict[str, Any]:
        """Coerce None to {}, pass dicts through, reject everything else."""
        if value is None:
            return {}

        if isinstance(value, dict):
            return value

        raise AnsibleFilterError(
            f"hostname expects a dict-like ansible_facts, got: {type(value)!r}"
        )
|
||||
|
|
@ -1,69 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin exposing 'linked_version'."""

    def filters(self):
        return {
            "linked_version": self.linked_version,
        }

    def linked_version(self, data: dict, install_path: str, version: str):
        """
        check for linked version in `install_path`

        `data` is a stat-like dictionary:
        {
            'exists': True,
            'path': '/usr/bin/influxd', ...,
            'islnk': True, ...,
            'lnk_source': '/opt/influxd/2.8.0/influxd',
            'lnk_target': '/opt/influxd/2.8.0/influxd', ...
        }
        `install_path` is a string and NOT the filename!
            /opt/influxd/2.8.0
        `version` is only included in the debug log output.

        result: TRUE, when destination is a link and the base path equal with install path
                otherwise FALSE
        """
        display.vv(
            f"bodsch.core.linked_version(self, data: {data}, install_path: {install_path}, version: {version})"
        )

        _is_activated = False

        _destination_exists = data.get("exists", False)

        display.vvv(f" - destination exists : {_destination_exists}")

        if _destination_exists:
            _destination_islink = data.get("islnk", False)
            _destination_lnk_source = data.get("lnk_source", None)
            _destination_path = data.get("path", None)

            # When the destination is a link, compare against the directory
            # of the link source rather than the destination path itself.
            if _destination_lnk_source:
                _destination_path = os.path.dirname(_destination_lnk_source)

            display.vvv(f" - is link : {_destination_islink}")
            display.vvv(f" - link src : {_destination_lnk_source}")
            display.vvv(f" - base path : {_destination_path}")

            _is_activated = install_path == _destination_path

        display.vv(f"= is activated: {_is_activated}")

        return _is_activated
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """
    Ansible file jinja2 tests
    """

    def filters(self):
        return {"fstypes": self.fstypes}

    def fstypes(self, data):
        """
        Return the list of 'fstype' values from a list of mount dicts.

        Args:
            data: list of dicts, each containing an 'fstype' key.

        Returns:
            List of fstype strings.
        """
        display.vv(f"bodsch.core.fstypes({data}")

        # Fix: removed the dead 'result = []' initialization that was
        # immediately overwritten by the comprehension.
        result = [d["fstype"] for d in data]

        display.v("result {} {}".format(result, type(result)))

        return result
|
||||
|
|
@ -1,29 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin exposing 'openvpn_clients'."""

    def filters(self):
        return {
            "openvpn_clients": self.openvpn_clients,
        }

    def openvpn_clients(self, data, hostvars):
        """
        combined_list: "{{ combined_list | default([]) + hostvars[item].openvpn_mobile_clients }}"

        Appends 'openvpn_mobile_clients' from hostvars (when present and a
        list) to the accumulated client list.
        """
        display.vv(f"bodsch.core.openvpn_clients({data}, {hostvars})")

        client = hostvars.get("openvpn_mobile_clients", None)
        if client and isinstance(client, list):
            # Fix: return a new list instead of mutating the caller's 'data'
            # in place ('data += client' extended the shared list object).
            return data + client

        return data
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """
    Ansible file jinja2 tests
    """

    def filters(self):
        return {
            "persistent_pool": self.persistent_pool,
            "clients_type": self.clients_type,
        }

    def persistent_pool(self, data):
        """
        Build the persistent-pool entries: every item that defines a
        'static_ip' is reduced to its name/state/static_ip triple.
        """
        result = [
            dict(
                name=entry.get("name"),
                state=entry.get("state", "present"),
                static_ip=entry.get("static_ip"),
            )
            for entry in data
            if entry.get("static_ip", None) is not None
        ]

        display.v(f" = result : {result}")
        return result

    def clients_type(self, data, type="static"):
        """Select client entries by type: 'static' (non-roadrunner) or 'roadrunner'."""
        result = []

        for entry in data:
            is_roadrunner = entry.get("roadrunner", False)

            if type == "static" and not is_roadrunner:
                result.append(entry)
            elif type == "roadrunner" and is_roadrunner:
                result.append(entry)

        display.v(f" = result : {result}")
        return result
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin exposing 'parse_checksum'."""

    def filters(self):
        return {
            "parse_checksum": self.parse_checksum,
        }

    def parse_checksum(self, data, application, os, arch, file_extension="tar.gz"):
        """
        parse version string

        Picks the checksum line matching the given application / OS / arch /
        extension from a list of 'checksum  filename' lines and returns only
        the checksum part.

        Returns:
            Checksum string, or None when no line matches.
        """
        display.vv(
            f"bodsch.core.parse_checksum(self, data, {application}, {os}, {arch})"
        )

        checksum = None
        os = os.lower()
        display.vvv(f" data: {data}")
        display.vvv(f" os: {os}")
        display.vvv(f" arch: {arch}")
        display.vvv(f" file_extension: {file_extension}")

        if isinstance(data, list):
            # 206cf787c01921574ca171220bb9b48b043c3ad6e744017030fed586eb48e04b alertmanager-0.25.0.linux-amd64.tar.gz
            # (?P<checksum>[a-zA-Z0-9]+).*alertmanager[-_].*linux-amd64\.tar\.gz$
            pattern = rf"(?P<checksum>[a-zA-Z0-9]+).*{application}[-_].*{os}[-_]{arch}\.{file_extension}"
            # Fix: the previous '[...][0]' raised IndexError when nothing
            # matched; next(..., None) keeps the documented None fall-through.
            checksum = next((x for x in data if re.search(pattern, x)), None)

            display.vvv(f" found checksum: {checksum}")

        if isinstance(checksum, str):
            checksum = checksum.split(" ")[0]

        display.vv(f"= checksum: {checksum}")

        return checksum
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
__metaclass__ = type
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin exposing 'python_extra_args'."""

    def filters(self):
        return {"python_extra_args": self.python_extra_args}

    def python_extra_args(
        self, data, python_version, extra_args=None, break_system_packages=True
    ):
        """
        add extra args for python pip installation

        Adds --break-system-packages for Python >= 3.11 unless disabled.

        Args:
            data: unused; kept for filter-signature compatibility.
            python_version: dict with 'major' and 'minor' keys.
            extra_args: optional list of extra pip arguments.
            break_system_packages: append --break-system-packages when applicable.

        Returns:
            Space-joined, deduplicated argument string.
        """
        # Fix: avoid the mutable default argument (was extra_args=[]).
        if extra_args is None:
            extra_args = []

        result = list(set(extra_args))

        python_version_major = python_version.get("major", None)
        python_version_minor = python_version.get("minor", None)

        # Guard against missing version keys (int(None) raised TypeError).
        if (
            python_version_major is not None
            and python_version_minor is not None
            and int(python_version_major) == 3
            and int(python_version_minor) >= 11
            and break_system_packages
        ):
            result.append("--break-system-packages")

        # deduplicate
        result = list(set(result))

        result = " ".join(result)

        display.vv(f"= {result}")
        return result
|
||||
|
|
@ -1,185 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import json
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugins for merging sshd host lists and mapping sshd option keys."""

    def filters(self):
        return {
            "merge_lists": self.merge_lists,
            "sshd_values": self.sshd_values,
        }

    def merge_lists(self, defaults, data):
        """
        Merge user-defined host entries over the defaults.

        Entries present in both lists (matched by 'host') keep the user's
        version from `data`.
        """
        count_defaults = len(defaults)
        count_data = len(data)

        display.vv(
            "defaults: ({type}) {len} - {data} entries".format(
                data=defaults, type=type(defaults), len=count_defaults
            )
        )
        display.vv(json.dumps(data, indent=2, sort_keys=False))
        display.vv(
            "data : ({type}) {len} - {data} entries".format(
                data=data, type=type(data), len=count_data
            )
        )

        # short way
        if count_defaults == 0:
            return data

        if count_data == 0:
            return defaults

        # Drop every default overridden by a user entry with the same 'host';
        # the user's entry takes precedence.
        # (Fix: the previous loop appended None placeholders to an
        # intermediate list that was discarded by the reassignment below —
        # dead code, removed.)
        for d in data:
            _name = d["host"]
            if self.__search(defaults, _name):
                for i in range(len(defaults)):
                    if defaults[i]["host"] == _name:
                        del defaults[i]
                        break

        # add both lists
        result = data + defaults

        display.vv(f"= result: {result}")

        return result

    def sshd_values(self, data):
        """
        Replace the keys in a YAML structure based on a fixed key map.

        :param data: Ansible data structure
        :return: Ansible data structure with the keys replaced.
        """
        display.vv(f"bodsch.core.sshd_values({data})")

        # Recursive helper
        def replace_keys(obj):
            """
            Map snake_case option names to their sshd_config spelling.
            """
            key_map = {
                "port": "Port",
                "address_family": "AddressFamily",
                "listen_address": "ListenAddress",
                "host_keys": "HostKey",
                "rekey_limit": "RekeyLimit",
                "syslog_facility": "SyslogFacility",
                "log_level": "LogLevel",
                "log_verbose": "LogVerbose",
                "login_grace_time": "LoginGraceTime",
                "permit_root_login": "PermitRootLogin",
                "strict_modes": "StrictModes",
                "max_auth_tries": "MaxAuthTries",
                "max_sessions": "MaxSessions",
                "pubkey_authentication": "PubkeyAuthentication",
                "authorized_keys_file": "AuthorizedKeysFile",
                "authorized_principals_file": "AuthorizedPrincipalsFile",
                "authorized_keys_command": "AuthorizedKeysCommand",
                "authorized_keys_command_user": "AuthorizedKeysCommandUser",
                "hostbased_authentication": "HostbasedAuthentication",
                "hostbased_accepted_algorithms": "HostbasedAcceptedAlgorithms",
                "host_certificate": "HostCertificate",
                "host_key": "HostKey",
                "host_key_agent": "HostKeyAgent",
                "host_key_algorithms": "HostKeyAlgorithms",
                "ignore_user_known_hosts": "IgnoreUserKnownHosts",
                "ignore_rhosts": "IgnoreRhosts",
                "password_authentication": "PasswordAuthentication",
                "permit_empty_passwords": "PermitEmptyPasswords",
                "challenge_response_authentication": "ChallengeResponseAuthentication",
                "kerberos_authentication": "KerberosAuthentication",
                "kerberos_or_local_passwd": "KerberosOrLocalPasswd",
                "kerberos_ticket_cleanup": "KerberosTicketCleanup",
                "kerberos_get_afs_token": "KerberosGetAFSToken",
                "kex_algorithms": "KexAlgorithms",
                "gss_api_authentication": "GSSAPIAuthentication",
                "gss_api_cleanup_credentials": "GSSAPICleanupCredentials",
                "gss_api_strict_acceptor_check": "GSSAPIStrictAcceptorCheck",
                "gss_api_key_exchange": "GSSAPIKeyExchange",
                "use_pam": "UsePAM",
                "allow_agent_forwarding": "AllowAgentForwarding",
                "allow_tcp_forwarding": "AllowTcpForwarding",
                "gateway_ports": "GatewayPorts",
                "x11_forwarding": "X11Forwarding",
                "x11_display_offset": "X11DisplayOffset",
                "x11_use_localhost": "X11UseLocalhost",
                "permit_tty": "PermitTTY",
                "print_motd": "PrintMotd",
                "print_last_log": "PrintLastLog",
                "tcp_keep_alive": "TCPKeepAlive",
                "permituser_environment": "PermitUserEnvironment",
                "compression": "Compression",
                "client_alive_interval": "ClientAliveInterval",
                "client_alive_count_max": "ClientAliveCountMax",
                "ciphers": "Ciphers",
                "deny_groups": "DenyGroups",
                "deny_users": "DenyUsers",
                "macs": "MACs",
                "use_dns": "UseDNS",
                "pid_file": "PidFile",
                "max_startups": "MaxStartups",
                "permit_tunnel": "PermitTunnel",
                "chroot_directory": "ChrootDirectory",
                "version_addendum": "VersionAddendum",
                "banner": "Banner",
                "accept_env": "AcceptEnv",
                "subsystem": "Subsystem",
                "match_users": "Match",
                # ssh_config
                "hash_known_hosts": "HashKnownHosts",
                "send_env": "SendEnv",
                # "": "",
            }

            if isinstance(obj, dict):
                # Replace the keys and recurse into the values
                return {key_map.get(k, k): replace_keys(v) for k, v in obj.items()}
            elif isinstance(obj, list):
                # For lists, process the elements recursively
                return [replace_keys(item) for item in obj]
            else:
                return obj

        # Replace the keys in the supplied structure
        result = replace_keys(data)

        display.v(f"= result: {result}")

        return result

    def __sort_list(self, _list, _filter):
        """Sort a list of dicts by the given key."""
        return sorted(_list, key=lambda k: k.get(_filter))

    def __search(self, d, name):
        """Return the first entry in d whose 'host' equals name, else None."""
        res = None
        for sub in d:
            if sub["host"] == name:
                res = sub
                break

        return res
|
||||
|
|
@ -1,79 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2022-2024, Bodo Schulz <bodo@boone-schulz.de>
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin with TLS helpers: ``support_tls`` and ``tls_directory``."""

    def filters(self):
        return {
            "support_tls": self.support_tls,
            "tls_directory": self.tls_directory,
        }

    def support_tls(self, data):
        """
        Return True when ``data['ssl']`` enables TLS and names all three files.

        Expected input shape (example)::

            ssl:
              enabled: true
              cert_file: /etc/coolwsd/cert.pem
              key_file: /etc/coolwsd/key.pem
              ca_file: /etc/coolwsd/ca-chain.cert.pem

        Returns:
            bool: True only if 'enabled' is truthy AND ca/cert/key are all set.
        """
        display.vv(f"bodsch.core.support_tls({data})")

        ssl_data = data.get("ssl", {})

        ssl_enabled = ssl_data.get("enabled", None)
        ssl_ca = ssl_data.get("ca_file", None)
        ssl_cert = ssl_data.get("cert_file", None)
        ssl_key = ssl_data.get("key_file", None)

        if ssl_enabled and ssl_ca and ssl_cert and ssl_key:
            return True
        else:
            return False

    def tls_directory(self, data):
        """
        Return the common directory of the configured TLS files.

        Returns:
            str | None: the directory when ca/cert/key all live in the same
            directory; None otherwise (files missing, or spread over several
            directories).

        Bugfix: the original never initialised ``result``, so every call where
        ``len(directory) != 1`` raised UnboundLocalError.
        """
        display.vv(f"bodsch.core.tls_directory({data})")

        directory = []

        ssl_data = data.get("ssl", {})

        ssl_ca = ssl_data.get("ca_file", None)
        ssl_cert = ssl_data.get("cert_file", None)
        ssl_key = ssl_data.get("key_file", None)

        if ssl_ca and ssl_cert and ssl_key:
            directory.append(os.path.dirname(ssl_ca))
            directory.append(os.path.dirname(ssl_cert))
            directory.append(os.path.dirname(ssl_key))

            # de-duplicate: one entry means all files share a directory
            directory = list(set(directory))

        result = None

        if len(directory) == 1:
            result = directory[0]

        display.vv(f"  = {result}")

        return result
|
||||
|
|
@ -1,138 +0,0 @@
|
|||
# python 3 headers, required if submitting to Ansible
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from ansible.plugins.test.core import version_compare
|
||||
from ansible.utils.display import Display
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin with syslog-ng helpers (service lookup, log dirs, config rendering)."""

    def filters(self):
        return {
            "get_service": self.get_service,
            "log_directories": self.log_directories,
            "syslog_network_definition": self.syslog_network_definition,
            "verify_syslog_options": self.verify_syslog_options,
        }

    def get_service(self, data, search_for):
        """
        Return the service name from *data* whose key starts with *search_for*.

        The first matching entry's 'name' value is returned with a trailing
        '.service' suffix removed; None when nothing matches.
        """
        display.vv(f"bodsch.core.get_service(self, {data}, {search_for})")

        name = None
        regex_list_compiled = re.compile(f"^{search_for}.*")

        # keep only entries whose key matches the prefix regex
        match = {k: v for k, v in data.items() if re.match(regex_list_compiled, k)}

        # display.vv(f"found: {match} {type(match)} {len(match)}")

        if isinstance(match, dict) and len(match) > 0:
            # only the FIRST match is considered; dict order decides which one
            values = list(match.values())[0]
            name = values.get("name", search_for).replace(".service", "")

        # display.vv(f"= result {name}")
        return name

    def log_directories(self, data, base_directory):
        """
        Return a list of directories derived from the 'file_name' values in *data*.

        Only file names containing '/$' (a templated tail, e.g. '/$HOST') are
        considered; the part before '/$' is joined onto *base_directory*.
        Duplicates are removed while preserving order.
        """
        display.vv(f"bodsch.core.log_directories(self, {data}, {base_directory})")

        log_dirs = []
        log_files = sorted(
            [v.get("file_name") for k, v in data.items() if v.get("file_name")]
        )
        # dict.fromkeys preserves insertion order while de-duplicating
        unique = list(dict.fromkeys(log_files))
        for d in unique:
            if "/$" in d:
                clean_dir_name = d.split("/$")[0]
                log_dirs.append(clean_dir_name)

        unique_dirs = list(dict.fromkeys(log_dirs))

        log_dirs = []

        for file_name in unique_dirs:
            full_file_name = os.path.join(base_directory, file_name)
            log_dirs.append(full_file_name)

        # display.v(f"= result {log_dirs}")
        return log_dirs

    def validate_syslog_destination(self, data):
        """Not implemented yet; placeholder (not exposed via filters())."""
        pass

    def syslog_network_definition(self, data, conf_type="source"):
        """
        Render a dict of syslog-ng network options into config syntax.

        Scalars are wrapped in parentheses (bools become (yes)/(no), strings
        are additionally quoted); nested dicts are rendered recursively.
        The 'ip' key is special-cased: parenthesised for sources, quoted
        otherwise. Plain strings pass through unchanged.
        """
        display.vv(f"bodsch.core.syslog_network_definition({data}, {conf_type})")

        def as_boolean(value):
            return "yes" if value else "no"

        def as_string(value):
            return f'"{value}"'

        def as_list(value):
            return ", ".join(value)

        res = {}
        if isinstance(data, dict):

            for key, value in data.items():
                if key == "ip":
                    # NOTE: assigning a fresh dict here discards options seen
                    # before the 'ip' key — assumed intentional; verify callers.
                    if conf_type == "source":
                        res = dict(ip=f"({value})")
                    else:
                        res = dict(ip=f'"{value}"')
                else:
                    # bool must be tested before int (bool is an int subclass)
                    if isinstance(value, bool):
                        value = f"({as_boolean(value)})"
                    elif isinstance(value, str):
                        value = f"({as_string(value)})"
                    elif isinstance(value, int):
                        value = f"({value})"
                    elif isinstance(value, list):
                        value = f"({as_list(value)})"
                    elif isinstance(value, dict):
                        value = self.syslog_network_definition(value, conf_type)

                    res.update({key: value})

        if isinstance(data, str):
            res = data

        # display.v(f"= res {res}")
        return res

    def verify_syslog_options(self, data, version):
        """
        Migrate *data* (mutated in place) to match the syslog-ng *version*.

        For >= 4.1 the obsolete 'stats_freq' key is moved into the
        'stats.freq' block; for < 4.1 any 'stats' block is dropped.
        """
        display.vv(f"bodsch.core.verify_syslog_options({data}, {version})")

        if version_compare(str(version), "4.1", ">="):
            if data.get("stats_freq") is not None:
                stats_freq = data.pop("stats_freq")
                """
                obsoleted keyword, please update your configuration; keyword='stats_freq'
                change='Use the stats() block. E.g. stats(freq(1));
                """
                # make sure 'stats' is a dict before nesting the value
                if not isinstance(data.get("stats"), dict):
                    data["stats"] = {}

                data["stats"]["freq"] = stats_freq

        if version_compare(str(version), "4.1", "<"):
            data.pop("stats", None)  # no KeyError if absent

        # display.v(f"= result {data}")
        return data
|
||||
|
|
@ -1,121 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
# filter_plugins/var_type.py
|
||||
from collections.abc import Mapping, Sequence
|
||||
from collections.abc import Set as ABCSet
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
# optional: we vermeiden harte Abhängigkeit von Ansible, behandeln aber deren Wrapper als str
|
||||
_STR_WRAPPERS = {"AnsibleUnsafeText", "AnsibleUnicode", "AnsibleVaultEncryptedUnicode"}
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin with type helpers: ``type``, ``config_bool``, ``string_to_list``."""

    def filters(self):
        return {
            "type": self.var_type,
            "config_bool": self.config_bool_as_string,
            "string_to_list": self.string_to_list,
        }

    def var_type(self, value):
        """
        Return a canonical type name for *value*.

        Results: 'string', 'bytes', 'bytearray', 'bool', 'int', 'float',
        'list', 'tuple', 'set', 'dict' or 'NoneType'. Foreign/wrapper types
        fall back to their ABC category, and finally to the concrete class name.
        """
        # None
        if value is None:
            return "NoneType"

        t = type(value)

        # string-like wrappers (e.g. AnsibleUnsafeText) report as 'string'
        if isinstance(value, str) or t.__name__ in _STR_WRAPPERS:
            return "string"

        # bytes-like
        if isinstance(value, bytes):
            return "bytes"
        if isinstance(value, bytearray):
            return "bytearray"

        # bool before int (bool is a subclass of int)
        if isinstance(value, bool):
            return "bool"

        # base scalar types
        if isinstance(value, int):
            return "int"
        if isinstance(value, float):
            return "float"

        # concrete builtin containers first
        if isinstance(value, list):
            return "list"
        if isinstance(value, tuple):
            return "tuple"
        if isinstance(value, set):
            return "set"
        if isinstance(value, dict):
            return "dict"

        # ABC fallbacks for wrappers (e.g. _AnsibleLazyTemplateList, AnsibleMapping ...)
        if isinstance(value, Mapping):
            return "dict"
        if isinstance(value, ABCSet):
            return "set"
        if isinstance(value, Sequence) and not isinstance(
            value, (str, bytes, bytearray)
        ):
            # unknown sequence-like wrappers are treated as list
            return "list"

        # last resort: the concrete class name
        return t.__name__

    def config_bool_as_string(self, data, true_as="yes", false_as="no"):
        """
        Return *true_as*/*false_as* for booleans, False for None, and the
        value itself for everything else.

        Bugfix: the original tested ``type(data) is None``, which is never
        true (type() never returns None), so None fell through unchanged;
        the first isinstance(bool) assignment was also immediately
        overwritten by the second chain.
        """
        # display.vv(f"bodsch.core.config_bool({data}, {type(data)}, {true_as}, {false_as})")

        if data is None:
            return False
        if isinstance(data, bool):
            return true_as if data else false_as
        return data

    def string_to_list(self, data):
        """Coerce a scalar (str/int) into a one-element list; lists pass through."""
        display.vv(f"bodsch.core.string_to_list({data})")

        result = []
        if isinstance(data, str):
            result.append(data)
        elif isinstance(data, int):
            result.append(str(data))
        elif isinstance(data, list):
            result = data

        display.vv(f"= result: {result}")

        return result
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin with ``union_by``."""

    def filters(self):
        return {
            "union_by": self.union,
        }

    def union(self, data, defaults, union_by):
        """
        Merge *data* over *defaults*, matched on the key *union_by*.

        When *data* is empty, *defaults* is returned unchanged. Otherwise
        every user-supplied entry is kept (entries sharing a *union_by*
        value with a default override that default).

        Bugfix: the original's else-branch appended ``found[0]`` exactly
        when ``found`` was empty, which always raised IndexError; the
        user-supplied entry is kept in either case.
        """
        if len(data) == 0:
            return defaults

        result = []

        for item in data:
            display.vv(f"  - {item}")
            # The user-supplied entry always wins, whether or not a default
            # with the same union_by value exists.
            result.append(item)

        display.vv(f"= {result}")
        return result
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class FilterModule(object):
    """Filter plugin with ``compare_list`` and ``upgrade``."""

    def filters(self):
        return {
            "compare_list": self.compare_list,
            "upgrade": self.upgrade,
        }

    def compare_list(self, data_list, compare_to_list):
        """
        Return the intersection of two lists, preserving *data_list* order.
        """
        display.vv(f"bodsch.core.compare_list({data_list}, {compare_to_list})")

        result = [item for item in data_list if item in compare_to_list]

        display.vv(f"return : {result}")
        return result

    def upgrade(self, install_path, bin_path):
        """
        Decide whether an upgrade step is required from two ansible.builtin.stat
        results.

        Returns True only when the binary path exists but is NOT a symlink
        (a pre-symlink installation layout); False otherwise. The original's
        elif/else both returned False, so the decision reduces to exactly
        that condition.

        Robustness fix: the original called ``.get()`` directly on
        ``install_path.get("stat", None)`` which raises AttributeError when
        the 'stat' key is missing; an empty dict is used as fallback.
        """
        display.vv(f"bodsch.core.upgrade({install_path}, {bin_path})")

        bin_path_stats = bin_path.get("stat") or {}

        bin_path_exists = bin_path_stats.get("exists", False)
        link_to_bin = bin_path_stats.get("islnk", False) if bin_path_exists else None

        result = bool(bin_path_exists and not link_to_bin)

        display.vv(f"return : {result}")
        return result
|
||||
|
|
@ -1,222 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# (c) 2017 Ansible Project
|
||||
# (c) 2022-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# "MODIFED WITH https://github.com/philfry/ansible/blob/37c616dc76d9ebc3cbf0285a22e55f0e4db4185e/lib/ansible/plugins/lookup/fileglob.py"
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
# from ansible.utils.listify import listify_lookup_plugin_terms as listify
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.utils.display import Display
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = """
|
||||
name: fileglob
|
||||
author: Bodo Schulz
|
||||
version_added: "1.0.4"
|
||||
short_description: list files matching a pattern
|
||||
description:
|
||||
- Find all files in a directory tree that match a pattern (recursively).
|
||||
options:
|
||||
_terms:
|
||||
required: False
|
||||
description: File extension on which a comparison is to take place.
|
||||
type: str
|
||||
search_path:
|
||||
required: False
|
||||
description: A list of additional directories to be searched.
|
||||
type: list
|
||||
default: []
|
||||
version_added: "1.0.4"
|
||||
notes:
|
||||
- Patterns are only supported on files, not directory/paths.
|
||||
- Matching is against local system files on the Ansible controller.
|
||||
To iterate a list of files on a remote node, use the M(ansible.builtin.find) module.
|
||||
- Returns a string list of paths joined by commas, or an empty list if no files match. For a 'true list' pass C(wantlist=True) to the lookup.
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Display paths of all .tpl files
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ lookup('bodsch.core.file_glob', '.tpl') }}"
|
||||
|
||||
- name: Show paths of all .tpl files, extended by further directories
|
||||
ansible.builtin.debug:
|
||||
msg: "{{ lookup('bodsch.core.file_glob', '.tpl') }}"
|
||||
vars:
|
||||
search_path:
|
||||
- ".."
|
||||
- "../.."
|
||||
|
||||
- name: Copy each file over that matches the given pattern
|
||||
ansible.builtin.copy:
|
||||
src: "{{ item }}"
|
||||
dest: "/etc/fooapp/"
|
||||
owner: "root"
|
||||
mode: 0600
|
||||
with_file_glob:
|
||||
- "*.tmpl"
|
||||
|
||||
- name: Copy each template over that matches the given pattern
|
||||
ansible.builtin.copy:
|
||||
src: "{{ item }}"
|
||||
dest: "/etc/alertmanager/templates/"
|
||||
owner: "root"
|
||||
mode: 0640
|
||||
with_file_glob:
|
||||
- ".tmpl"
|
||||
vars:
|
||||
search_path:
|
||||
- ".."
|
||||
- "../.."
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
_list:
|
||||
description:
|
||||
- list of files
|
||||
type: list
|
||||
elements: path
|
||||
"""
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class LookupModule(LookupBase):
    """
    Ansible lookup plugin that finds files matching an extension in role
    or playbook search paths.

    The plugin:
      * Resolves search locations based on Ansible's search paths and optional
        user-specified paths.
      * Recursively walks the "templates" and "files" directories.
      * Returns a flat list of matching file paths.
    """

    def __init__(self, basedir: Optional[str] = None, **kwargs: Any) -> None:
        """
        Initialize the lookup module.

        NOTE(review): super().__init__() is not called here — assumed
        intentional (mirrors upstream fileglob); confirm against LookupBase.

        Args:
            basedir: Optional base directory for lookups, usually supplied by Ansible.
            **kwargs: Additional keyword arguments passed from Ansible.

        Returns:
            None
        """
        self.basedir = basedir

    def run(
        self,
        terms: List[str],
        variables: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> List[str]:
        """
        Execute the fileglob lookup.

        For each term (interpreted as a file extension), this method searches
        recursively under all derived search paths and returns a flattened list
        of matching file paths.

        Args:
            terms: A list of file extensions or patterns (e.g. ['.tpl']).
            variables: The Ansible variable context, used to determine
                - ansible_search_path
                - role_path
                - search_path (custom additional paths)
                - search_regex (optional filename regex filter)
            **kwargs: Additional lookup options, passed through to set_options().

        Returns:
            list[str]: A list containing the full paths of all files matching
            the provided extensions within the resolved search directories.
        """
        display.vv(f"run({terms}, variables, {kwargs})")
        self.set_options(direct=kwargs)

        paths: List[str] = []
        ansible_search_path = variables.get("ansible_search_path", None)
        role_path = variables.get("role_path")
        lookup_search_path = variables.get("search_path", None)
        lookup_search_regex = variables.get("search_regex", None)

        if ansible_search_path:
            paths = ansible_search_path
        else:
            paths.append(self.get_basedir(variables))

        if lookup_search_path:
            # NOTE(review): a plain string search_path is silently ignored —
            # only lists are honoured; confirm this is intended.
            if isinstance(lookup_search_path, list):
                for p in lookup_search_path:
                    paths.append(os.path.join(role_path, p))

        # only these two conventional subdirectories are searched
        search_path = ["templates", "files"]

        ret: List[str] = []
        found_files: List[List[str]] = []

        for term in terms:
            """ """
            for p in paths:
                for sp in search_path:
                    path = os.path.join(p, sp)
                    display.vv(f"  - lookup in directory: {path}")
                    r = self._find_recursive(
                        folder=path, extension=term, search_regex=lookup_search_regex
                    )
                    # display.vv(f"  found: {r}")
                    if len(r) > 0:
                        found_files.append(r)

        # found_files is a list of lists (one per directory hit); flatten it
        ret = self._flatten(found_files)

        return ret

    def _find_recursive(
        self,
        folder: str,
        extension: str,
        search_regex: Optional[str] = None,
    ) -> List[str]:
        """
        Recursively search for files in the given folder that match an extension
        and an optional regular expression.

        Args:
            folder: The root directory to walk recursively.
            extension: The file extension to match (e.g. ".tpl").
            search_regex: Optional regular expression string. If provided, only
                filenames matching this regex are included.

        Returns:
            list[str]: A list containing the full paths of matching files found
            under the given folder. If no files match, an empty list is returned.
        """
        # display.vv(f"_find_recursive({folder}, {extension}, {search_regex})")
        matches: List[str] = []

        for root, dirnames, filenames in os.walk(folder):
            for filename in filenames:
                if filename.endswith(extension):
                    if search_regex:
                        # NOTE(review): the pattern is recompiled per file;
                        # hoisting outside the walk would be cheaper.
                        reg = re.compile(search_regex)
                        if reg.match(filename):
                            matches.append(os.path.join(root, filename))
                    else:
                        matches.append(os.path.join(root, filename))

        return matches
|
||||
|
|
@ -1,463 +0,0 @@
|
|||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
"""
|
||||
Ansible lookup plugin to read secrets from Vaultwarden using the rbw CLI.
|
||||
|
||||
This module provides the `LookupModule` class, which integrates the `rbw`
|
||||
command line client into Ansible as a lookup plugin. It supports optional
|
||||
index-based lookups, JSON parsing of secrets, and on-disk caching for both
|
||||
the rbw index and retrieved secrets.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
||||
DOCUMENTATION = """
|
||||
lookup: rbw
|
||||
author:
|
||||
- Bodo 'bodsch' (@bodsch)
|
||||
version_added: "1.0.0"
|
||||
short_description: Read secrets from Vaultwarden via the rbw CLI
|
||||
description:
|
||||
- This lookup plugin retrieves entries from Vaultwarden using the 'rbw' CLI client.
|
||||
- It supports selecting specific fields, optional JSON parsing, and structured error handling.
|
||||
- Supports index-based lookups for disambiguation by name/folder/user.
|
||||
options:
|
||||
_terms:
|
||||
description:
|
||||
- The Vault entry to retrieve, specified by path, name, or UUID.
|
||||
required: true
|
||||
field:
|
||||
description:
|
||||
- Optional field within the entry to return (e.g., username, password).
|
||||
required: false
|
||||
type: str
|
||||
parse_json:
|
||||
description:
|
||||
- If set to true, the returned value will be parsed as JSON.
|
||||
required: false
|
||||
type: bool
|
||||
default: false
|
||||
strict_json:
|
||||
description:
|
||||
- If true and parse_json is enabled, invalid JSON will raise an error.
|
||||
- If false, invalid JSON will return an empty dictionary.
|
||||
required: false
|
||||
type: bool
|
||||
default: false
|
||||
use_index:
|
||||
description:
|
||||
- If true, the index will be used to map name/folder/user to a unique id.
|
||||
required: false
|
||||
type: bool
|
||||
default: false
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Read a password from Vault by UUID
|
||||
debug:
|
||||
msg: "{{ lookup('bodsch.core.rbw', '0123-uuid-4567', field='password') }}"
|
||||
|
||||
- name: Read a password using index
|
||||
debug:
|
||||
msg: "{{ lookup('bodsch.core.rbw',
|
||||
{'name': 'expresszuschnitt.de', 'folder': '.immowelt.de', 'user': 'immo@boone-schulz.de'},
|
||||
field='password',
|
||||
use_index=True) }}"
|
||||
|
||||
- name: Multi-fetch
|
||||
set_fact:
|
||||
multi: "{{ lookup('bodsch.core.rbw',
|
||||
[{'name': 'foo', 'folder': '', 'user': ''}, 'some-uuid'],
|
||||
field='username',
|
||||
use_index=True) }}"
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
_raw:
|
||||
description:
|
||||
- The raw value from the Vault entry, either as a string or dictionary (if parse_json is true).
|
||||
type: raw
|
||||
"""
|
||||
|
||||
|
||||
class LookupModule(LookupBase):
|
||||
"""
|
||||
Ansible lookup module for retrieving secrets from Vaultwarden via the rbw CLI.
|
||||
|
||||
The plugin supports:
|
||||
* Lookup by UUID or by a combination of name, folder, and user.
|
||||
* Optional index-based resolution to derive a stable entry ID.
|
||||
* On-disk caching of both the rbw index and individual lookups.
|
||||
* Optional JSON parsing of retrieved secret values.
|
||||
|
||||
Attributes:
|
||||
CACHE_TTL (int): Time-to-live for cache entries in seconds.
|
||||
cache_directory (str): Base directory path for index and value caches.
|
||||
"""
|
||||
|
||||
CACHE_TTL = 300 # 5 Minuten
|
||||
cache_directory = f"{Path.home()}/.cache/ansible/lookup/rbw"
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""
|
||||
Initialize the lookup module and ensure the cache directory exists.
|
||||
|
||||
Args:
|
||||
*args: Positional arguments passed through to the parent class.
|
||||
**kwargs: Keyword arguments passed through to the parent class.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
super(LookupModule, self).__init__(*args, **kwargs)
|
||||
if not os.path.exists(self.cache_directory):
|
||||
os.makedirs(self.cache_directory, exist_ok=True)
|
||||
|
||||
    def run(self, terms, variables=None, **kwargs) -> List[Any]:
        """
        Execute the lookup and return the requested values.

        This method is called by Ansible when the lookup plugin is used. It
        resolves each term into an rbw entry ID (optionally using the index),
        retrieves and caches the value, and optionally parses the value as JSON.

        Args:
            terms: A list of lookup terms. Each term can be either:
                * A string representing an entry ID or name.
                * A dict with keys "name", "folder", and "user" for index-based lookup.
            variables: Ansible variables (unused, but part of the standard interface).
            **kwargs: Additional keyword arguments:
                * field (str): Optional field within the entry to return.
                * parse_json (bool): Whether to parse the result as JSON.
                * strict_json (bool): If True, invalid JSON raises an error.
                * use_index (bool): If True, resolve name/folder/user via rbw index.

        Returns:
            list: A list of values corresponding to the supplied terms. Each element
            is either:
                * A string (raw secret) when parse_json is False.
                * A dict (parsed JSON) when parse_json is True.

        Raises:
            AnsibleError: If input terms are invalid, the index lookup fails,
                the rbw command fails, or JSON parsing fails in strict mode.
        """
        display.v(f"run(terms={terms}, kwargs={kwargs})")

        if not terms or not isinstance(terms, list) or not terms[0]:
            raise AnsibleError("At least one Vault entry must be specified.")

        # assumes 'field' is passed as a string — TODO confirm callers
        field = kwargs.get("field", "").strip()
        parse_json = kwargs.get("parse_json", False)
        strict_json = kwargs.get("strict_json", False)
        use_index = kwargs.get("use_index", False)

        index_data: Optional[Dict[str, Any]] = None
        if use_index:
            # cached index first; fall back to fetching via rbw
            index_data = self._read_index()
            if index_data is None:
                index_data = self._fetch_index()
            display.v(f"Index has {len(index_data['entries'])} entries")

        results: List[Any] = []

        for term in terms:
            if isinstance(term, dict):
                name = term.get("name", "").strip()
                folder = term.get("folder", "").strip()
                user = term.get("user", "").strip()
                raw_entry = f"{name}|{folder}|{user}"
            else:
                name = term.strip()
                folder = ""
                user = ""
                raw_entry = name

            # silently skip empty terms
            if not name:
                continue

            entry_id = name  # fallback: use directly

            if index_data:
                # empty folder/user act as wildcards
                matches = [
                    e
                    for e in index_data["entries"]
                    if e["name"] == name
                    and (not folder or e["folder"] == folder)
                    and (not user or e["user"] == user)
                ]

                if not matches:
                    raise AnsibleError(
                        f"No matching entry found in index for: {raw_entry}"
                    )

                if len(matches) > 1:
                    raise AnsibleError(
                        f"Multiple matches found in index for: {raw_entry}"
                    )

                entry_id = matches[0]["id"]
                display.v(f"Resolved {raw_entry} → id={entry_id}")

            cache_key = self._cache_key(entry_id, field)
            cached = self._read_cache(cache_key)

            if cached is not None:
                value = cached
                display.v(f"Cache HIT for {entry_id}")
            else:
                value = self._fetch_rbw(entry_id, field)
                self._write_cache(cache_key, value)
                display.v(f"Cache MISS for {entry_id} — fetched with rbw")

            if parse_json:
                try:
                    results.append(json.loads(value))
                except json.decoder.JSONDecodeError as e:
                    if strict_json:
                        raise AnsibleError(
                            f"JSON parsing failed for entry '{entry_id}': {e}"
                        )
                    else:
                        # lenient mode: warn and substitute an empty dict
                        display.v(
                            f"Warning: Content of '{entry_id}' is not valid JSON."
                        )
                        results.append({})
                except Exception as e:
                    raise AnsibleError(f"Unexpected error parsing '{entry_id}': {e}")
            else:
                results.append(value)

        return results
|
||||
|
||||
def _fetch_rbw(self, entry_id: str, field: str) -> str:
|
||||
"""
|
||||
Call the rbw CLI to retrieve a specific entry or entry field.
|
||||
|
||||
Args:
|
||||
entry_id: The rbw entry identifier (UUID or resolved ID from index).
|
||||
field: Optional field name to retrieve (e.g. "username", "password").
|
||||
If empty, the default value for the entry is returned.
|
||||
|
||||
Returns:
|
||||
str: The trimmed stdout of the rbw command, representing the secret value.
|
||||
|
||||
Raises:
|
||||
AnsibleError: If the rbw command exits with a non-zero status.
|
||||
"""
|
||||
cmd = ["rbw", "get"]
|
||||
if field:
|
||||
cmd.extend(["--field", field])
|
||||
cmd.append(entry_id)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
)
|
||||
return result.stdout.strip()
|
||||
except subprocess.CalledProcessError as e:
|
||||
err_msg = e.stderr.strip() or e.stdout.strip()
|
||||
raise AnsibleError(f"Error retrieving Vault entry '{entry_id}': {err_msg}")
|
||||
|
||||
    def _fetch_index(self) -> Dict[str, Any]:
        """
        Fetch the rbw index and persist it in the local cache.

        The index contains a list of entries, each with id, user, name, and folder.
        It is stored on disk together with a timestamp and used for subsequent
        lookups until it expires.

        Returns:
            dict: A dictionary with:
                * "timestamp" (float): Unix timestamp when the index was fetched.
                * "entries" (list[dict]): List of index entries.

        Raises:
            AnsibleError: If the rbw index command fails.
        """
        cmd = ["rbw", "list", "--fields", "id,user,name,folder"]

        try:
            result = subprocess.run(
                cmd,
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
            lines = [
                line.strip() for line in result.stdout.splitlines() if line.strip()
            ]

            # assumes rbw emits tab-separated columns in this order — TODO confirm
            headers = ["id", "user", "name", "folder"]

            entries: List[Dict[str, str]] = []
            for line in lines:
                parts = line.split("\t")
                # pad short rows so every entry has all four keys
                if len(parts) < len(headers):
                    parts += [""] * (len(headers) - len(parts))
                entry = dict(zip(headers, parts))
                entries.append(entry)

            index_payload: Dict[str, Any] = {
                "timestamp": time.time(),
                "entries": entries,
            }

            self._write_index(index_payload)
            return index_payload

        except subprocess.CalledProcessError as e:
            # prefer stderr; some rbw errors land on stdout
            err_msg = e.stderr.strip() or e.stdout.strip()
            raise AnsibleError(f"Error retrieving rbw index: {err_msg}")
|
||||
|
||||
def _index_path(self) -> str:
|
||||
"""
|
||||
Compute the absolute file path of the index cache.
|
||||
|
||||
Returns:
|
||||
str: The full path to the index cache file.
|
||||
"""
|
||||
return os.path.join(self.cache_directory, "index.json")
|
||||
|
||||
def _read_index(self) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Read the rbw index from the cache if it exists and is still valid.
|
||||
|
||||
The index is considered valid if its age is less than or equal to
|
||||
CACHE_TTL. If the index is expired or cannot be read, it is removed.
|
||||
|
||||
Returns:
|
||||
dict | None: The cached index payload if available and not expired,
|
||||
otherwise None.
|
||||
|
||||
"""
|
||||
path = self._index_path()
|
||||
if not os.path.exists(path):
|
||||
return None
|
||||
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
payload = json.load(f)
|
||||
age = time.time() - payload["timestamp"]
|
||||
if age <= self.CACHE_TTL:
|
||||
return payload
|
||||
else:
|
||||
os.remove(path)
|
||||
return None
|
||||
except Exception as e:
|
||||
display.v(f"Index cache read error: {e}")
|
||||
return None
|
||||
|
||||
def _write_index(self, index_payload: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Persist the rbw index payload to disk.
|
||||
|
||||
Args:
|
||||
index_payload: The payload containing the index data and timestamp.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
path = self._index_path()
|
||||
try:
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(index_payload, f)
|
||||
except Exception as e:
|
||||
display.v(f"Index cache write error: {e}")
|
||||
|
||||
def _cache_key(self, entry_id: str, field: str) -> str:
|
||||
"""
|
||||
Create a deterministic cache key for a given entry and field.
|
||||
|
||||
Args:
|
||||
entry_id: The rbw entry identifier.
|
||||
field: The requested field name. May be an empty string.
|
||||
|
||||
Returns:
|
||||
str: A SHA-256 hash hex digest representing the cache key.
|
||||
"""
|
||||
raw_key = f"{entry_id}|{field}".encode("utf-8")
|
||||
return hashlib.sha256(raw_key).hexdigest()
|
||||
|
||||
def _cache_path(self, key: str) -> str:
|
||||
"""
|
||||
Compute the absolute file path for a given cache key.
|
||||
|
||||
Args:
|
||||
key: The cache key as returned by `_cache_key`.
|
||||
|
||||
Returns:
|
||||
str: The full path to the cache file for the given key.
|
||||
"""
|
||||
return os.path.join(self.cache_directory, key + ".json")
|
||||
|
||||
def _read_cache(self, key: str) -> Optional[str]:
|
||||
"""
|
||||
Read a cached value for the given key if present and not expired.
|
||||
|
||||
The cache entry is considered valid if its age is less than or equal to
|
||||
CACHE_TTL. If the entry is expired or cannot be read, it is removed.
|
||||
|
||||
Args:
|
||||
key: The cache key as returned by `_cache_key`.
|
||||
|
||||
Returns:
|
||||
str | None: The cached value if present and not expired,
|
||||
otherwise None.
|
||||
"""
|
||||
path = self._cache_path(key)
|
||||
if not os.path.exists(path):
|
||||
return None
|
||||
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
payload = json.load(f)
|
||||
age = time.time() - payload["timestamp"]
|
||||
if age <= self.CACHE_TTL:
|
||||
return payload["value"]
|
||||
else:
|
||||
os.remove(path)
|
||||
return None
|
||||
except Exception as e:
|
||||
display.v(f"Cache read error for key {key}: {e}")
|
||||
return None
|
||||
|
||||
def _write_cache(self, key: str, value: str) -> None:
|
||||
"""
|
||||
Write a value to the cache using the given key.
|
||||
|
||||
Args:
|
||||
key: The cache key as returned by `_cache_key`.
|
||||
value: The value to be cached, typically the raw secret string.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
path = self._cache_path(key)
|
||||
payload = {
|
||||
"timestamp": time.time(),
|
||||
"value": value,
|
||||
}
|
||||
try:
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(payload, f)
|
||||
except Exception as e:
|
||||
display.v(f"Cache write error for key {key}: {e}")
|
||||
|
|
@ -1,113 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2025, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import datetime
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def cache_valid_old(
    module: Any, cache_file_name: str, cache_minutes: int = 60, cache_file_remove: bool = True
) -> bool:
    """
    Legacy cache check: compare the cache file's creation time (ctime)
    against the current local time.

    Returns True when the cache is invalid — i.e. the file is missing or
    older than ``cache_minutes``. An expired file is deleted when
    ``cache_file_remove`` is True. Superseded by :func:`cache_valid`,
    which uses mtime instead of ctime.
    """
    out_of_cache = False

    if os.path.isfile(cache_file_name):
        module.debug(msg=f"read cache file '{cache_file_name}'")
        now = datetime.datetime.now()
        creation_time = datetime.datetime.fromtimestamp(
            os.path.getctime(cache_file_name)
        )
        diff = now - creation_time
        # age of the cache file, expressed in minutes
        cached_time = diff.total_seconds() / 60
        out_of_cache = cached_time > cache_minutes

        module.debug(msg=f" - now {now}")
        module.debug(msg=f" - creation_time {creation_time}")
        module.debug(msg=f" - cached since {cached_time}")
        module.debug(msg=f" - out of cache {out_of_cache}")

        if out_of_cache and cache_file_remove:
            os.remove(cache_file_name)
    else:
        # No cache file at all: immediately invalid.
        out_of_cache = True

    module.debug(msg="cache is {0}valid".format("not " if out_of_cache else ""))

    return out_of_cache
|
||||
|
||||
|
||||
def cache_valid(
    module: Any,
    cache_file_name: str,
    cache_minutes: int = 60,
    cache_file_remove: bool = True,
) -> bool:
    """
    Check whether a cache file is older than ``cache_minutes`` or missing.

    Returns True when the cache has expired (or does not exist) and, when
    ``cache_file_remove`` is True, the expired file has been deleted.
    Returns False while the cache is still fresh.

    :param module: Ansible module object, used for debug logging.
    :param cache_file_name: Path of the cache file (string).
    :param cache_minutes: Maximum age in minutes; beyond that the cache is invalid.
    :param cache_file_remove: Whether an expired cache file should be deleted.
    """
    path = Path(cache_file_name)

    # Missing file -> the cache is immediately considered invalid.
    if not path.is_file():
        module.debug(msg=f"Cache-Datei '{cache_file_name}' existiert nicht → ungültig")
        return True

    try:
        # Use mtime (time of last content change) instead of ctime,
        # because ctime can also shift on pure metadata changes.
        modification_time = datetime.datetime.fromtimestamp(path.stat().st_mtime)
    except OSError as e:
        module.debug(
            msg=f"Fehler beim Lesen der Modifikationszeit von '{cache_file_name}': {e} → Cache ungültig"
        )
        return True

    now = datetime.datetime.now()
    diff_minutes = (now - modification_time).total_seconds() / 60
    is_expired = diff_minutes > cache_minutes

    module.debug(
        msg=f"Cache-Datei '{cache_file_name}' gefunden. Letzte Änderung: {modification_time.isoformat()}"
    )
    module.debug(msg=f" → Jetzt: {now.isoformat()}")
    module.debug(
        msg=f" → Alter: {diff_minutes:.2f} Minuten (Limit: {cache_minutes} Minuten)"
    )
    module.debug(msg=f" → Abgelaufen: {is_expired}")

    # If expired and deletion is requested, try to remove the file.
    if is_expired and cache_file_remove:
        try:
            path.unlink()
            module.debug(
                msg=f" → Alte Cache-Datei '{cache_file_name}' wurde gelöscht."
            )
        except OSError as e:
            module.debug(
                msg=f" → Fehler beim Löschen der Cache-Datei '{cache_file_name}': {e}"
            )

    return is_expired
|
||||
|
|
@ -1,240 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from typing import Any, Optional, Tuple
|
||||
|
||||
# (changed, new_checksum, old_checksum) — shape returned by Checksum.validate().
ChecksumValidationResult = Tuple[bool, str, Optional[str]]
# (changed, checksum_from_file, old_checksum) — shape returned by
# Checksum.validate_from_file(); checksum may be None when the data file is missing.
ChecksumValidationFromFileResult = Tuple[bool, Optional[str], str]
|
||||
|
||||
|
||||
class Checksum:
    """
    Helper class for calculating and validating checksums.

    Typically used in an Ansible-module context; the calling module is
    stored for potential logging.

    Attributes:
        module: An Ansible-like module object.
    """

    def __init__(self, module: Any) -> None:
        """
        Initialize the checksum helper.

        Args:
            module: An Ansible-like module instance.
        """
        self.module = module

    def checksum(self, plaintext: Any, algorithm: str = "sha256") -> str:
        """
        Compute a checksum for arbitrary input data.

        The input is normalized via :meth:`_harmonize_data` and then
        hashed with the requested algorithm.

        Args:
            plaintext: Data to hash. Commonly a string, dict, or list.
            algorithm: Hashlib algorithm name (e.g. "md5", "sha256",
                "sha512"). Defaults to "sha256".

        Returns:
            str: Hex digest of the computed checksum.

        Raises:
            ValueError: If the hash algorithm is not supported by hashlib.
            AttributeError: If the normalized value does not support
                ``encode("utf-8")``.
        """
        normalized = self._harmonize_data(plaintext)
        digest = hashlib.new(algorithm)
        digest.update(normalized.encode("utf-8"))
        return digest.hexdigest()

    def validate(
        self, checksum_file: str, data: Any = None
    ) -> Tuple[bool, str, Optional[str]]:
        """
        Validate (and optionally reset) a checksum file against given data.

        Behavior:
            - If ``data`` is ``None`` and ``checksum_file`` exists, the
              checksum file is removed.
            - If ``checksum_file`` exists, its first line is treated as the
              previous checksum.
            - A new checksum is computed from ``data`` and compared.

        Args:
            checksum_file: Path of the file holding a single checksum line.
            data: Input data to hash and compare. If ``None``, the checksum
                file may be removed; hashing ``None`` then raises, since it
                cannot be normalized.

        Returns:
            tuple[bool, str, Optional[str]]: (changed, checksum, old_checksum)

        Raises:
            ValueError: If the internal hash algorithm is unsupported.
            AttributeError: If the data cannot be normalized for hashing.
        """
        old_checksum: Optional[str] = None

        # NOTE: a former guard `if not isinstance(data, str) or not
        # isinstance(data, dict):` was removed here — it was always true
        # (no value is both str and dict), so this body always ran.
        if data is None and os.path.exists(checksum_file):
            os.remove(checksum_file)

        if os.path.exists(checksum_file):
            with open(checksum_file, "r", encoding="utf-8") as f:
                # readline() tolerates an empty file, unlike readlines()[0].
                old_checksum = f.readline().strip()

        # checksum() normalizes internally; no need to pre-harmonize.
        new_checksum = self.checksum(data)
        changed = old_checksum != new_checksum

        return (changed, new_checksum, old_checksum)

    def validate_from_file(
        self, checksum_file: str, data_file: str
    ) -> Tuple[bool, Optional[str], str]:
        """
        Validate a checksum file against the contents of another file.

        Behavior:
            - If ``data_file`` does not exist but ``checksum_file`` does,
              the (stale) checksum file is removed.
            - If ``checksum_file`` exists, its first line is treated as the
              previous checksum.
            - A checksum is computed from ``data_file`` and compared.

        Args:
            checksum_file: Path of the file holding a single checksum line.
            data_file: Path of the file whose contents should be hashed.

        Returns:
            tuple[bool, Optional[str], str]:
                (changed, checksum_from_file, old_checksum), where
                checksum_from_file is ``None`` when ``data_file`` is not a
                file and old_checksum is "" when not available.
        """
        old_checksum = ""

        # Remove the checksum file when the data file itself is gone.
        if not os.path.exists(data_file) and os.path.exists(checksum_file):
            os.remove(checksum_file)

        if os.path.exists(checksum_file):
            with open(checksum_file, "r", encoding="utf-8") as f:
                # readline() tolerates an empty file, unlike readlines()[0].
                old_checksum = f.readline().strip()

        checksum_from_file = self.checksum_from_file(data_file)
        changed = old_checksum != checksum_from_file

        return (changed, checksum_from_file, old_checksum)

    def checksum_from_file(
        self,
        path: str,
        read_chunksize: int = 65536,
        algorithm: str = "sha256",
    ) -> Optional[str]:
        """
        Compute the checksum of a file's contents.

        The file is read in chunks to avoid loading it fully into memory.

        Args:
            path: Path to the file.
            read_chunksize: Maximum bytes read at once. Defaults to 64 KiB.
            algorithm: Hash algorithm name. Defaults to "sha256".

        Returns:
            Optional[str]: Hex digest if ``path`` is a file, else ``None``.

        Raises:
            ValueError: If the hash algorithm is not supported by hashlib.
            OSError: If the file cannot be opened or read.
        """
        if not os.path.isfile(path):
            return None

        digest = hashlib.new(algorithm)  # Raises for unknown algorithms.
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(read_chunksize), b""):
                digest.update(chunk)
                # Release greenthread; a noop when greenthreads are unused.
                time.sleep(0)

        return digest.hexdigest()

    def write_checksum(self, checksum_file: str, checksum: Any) -> None:
        """
        Write a checksum value to disk (single line with trailing newline).

        Args:
            checksum_file: Destination path for the checksum file.
            checksum: Value to write; skipped when falsy or empty.

        Returns:
            None

        Raises:
            OSError: If the file cannot be opened or written.
        """
        if checksum and len(str(checksum)) != 0:
            with open(checksum_file, "w", encoding="utf-8") as f:
                f.write(checksum + "\n")

    def _harmonize_data(self, data: Any) -> Any:
        """
        Normalize data into a stable representation for hashing.

        Rules:
            - dict: JSON serialized with sorted keys
            - list: concatenation of stringified elements
            - str:  returned as-is
            - other: ``data.copy()`` (raises when unsupported)

        Args:
            data: Input data.

        Returns:
            Any: Normalized representation (a string for dict/list/str).

        Raises:
            AttributeError: If ``data`` is not dict/list/str and does not
                implement ``copy()``.
            TypeError: If JSON serialization fails for dictionaries.
        """
        if isinstance(data, dict):
            return json.dumps(data, sort_keys=True)
        if isinstance(data, list):
            return "".join(str(x) for x in data)
        if isinstance(data, str):
            return data
        return data.copy()
|
||||
|
|
@ -1,646 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
|
||||
try:
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.x509.oid import ExtensionOID
|
||||
except ImportError as exc: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
"The 'cryptography' Python library is required to use crypto_utils"
|
||||
) from exc
|
||||
|
||||
|
||||
class OpenSSLObjectError(Exception):
    """Simple error type to signal parsing/crypto problems consistently."""
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Hilfsfunktionen für Zeitverarbeitung
|
||||
# ======================================================================
|
||||
|
||||
# strftime/strptime pattern for ASN.1 TIME values ("YYYYMMDDHHMMSSZ", UTC).
_ASN1_TIME_FORMAT = "%Y%m%d%H%M%SZ"
|
||||
|
||||
|
||||
def _to_utc_naive(dt: datetime) -> datetime:
|
||||
"""
|
||||
Konvertiert ein datetime-Objekt nach UTC und entfernt tzinfo.
|
||||
Naive Datumswerte werden als UTC interpretiert.
|
||||
"""
|
||||
if dt.tzinfo is None:
|
||||
return dt.replace(tzinfo=None)
|
||||
return dt.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
|
||||
|
||||
def _format_asn1_time(dt: Optional[datetime]) -> Optional[str]:
|
||||
"""
|
||||
datetime -> ASN.1 TIME (YYYYMMDDHHMMSSZ) oder None.
|
||||
"""
|
||||
if dt is None:
|
||||
return None
|
||||
dt_utc_naive = _to_utc_naive(dt)
|
||||
return dt_utc_naive.strftime(_ASN1_TIME_FORMAT)
|
||||
|
||||
|
||||
def _parse_asn1_time(value: str, input_name: str) -> datetime:
|
||||
"""
|
||||
ASN.1 TIME (YYYYMMDDHHMMSSZ) -> datetime (naiv, UTC).
|
||||
"""
|
||||
try:
|
||||
dt = datetime.strptime(value, _ASN1_TIME_FORMAT)
|
||||
except ValueError as exc:
|
||||
raise OpenSSLObjectError(
|
||||
f"{input_name!r} is not a valid ASN.1 TIME value: {value!r}"
|
||||
) from exc
|
||||
return dt
|
||||
|
||||
|
||||
def _parse_relative_spec(spec: str, input_name: str) -> timedelta:
|
||||
"""
|
||||
Parsen des relativen Formats (z.B. +32w1d2h3m4s) in ein timedelta.
|
||||
|
||||
Unterstützte Einheiten:
|
||||
- w: Wochen
|
||||
- d: Tage
|
||||
- h: Stunden
|
||||
- m: Minuten
|
||||
- s: Sekunden
|
||||
"""
|
||||
weeks = days = hours = minutes = seconds = 0
|
||||
pos = 0
|
||||
length = len(spec)
|
||||
|
||||
while pos < length:
|
||||
start = pos
|
||||
while pos < length and spec[pos].isdigit():
|
||||
pos += 1
|
||||
if start == pos:
|
||||
raise OpenSSLObjectError(
|
||||
f"Invalid relative time spec in {input_name!r}: {spec!r}"
|
||||
)
|
||||
number = int(spec[start:pos])
|
||||
|
||||
if pos >= length:
|
||||
raise OpenSSLObjectError(
|
||||
f"Missing time unit in relative time spec for {input_name!r}: {spec!r}"
|
||||
)
|
||||
|
||||
unit = spec[pos]
|
||||
pos += 1
|
||||
|
||||
if unit == "w":
|
||||
weeks += number
|
||||
elif unit == "d":
|
||||
days += number
|
||||
elif unit == "h":
|
||||
hours += number
|
||||
elif unit == "m":
|
||||
minutes += number
|
||||
elif unit == "s":
|
||||
seconds += number
|
||||
else:
|
||||
raise OpenSSLObjectError(
|
||||
f"Unknown time unit {unit!r} in relative time spec for {input_name!r}: {spec!r}"
|
||||
)
|
||||
|
||||
return timedelta(
|
||||
weeks=weeks,
|
||||
days=days,
|
||||
hours=hours,
|
||||
minutes=minutes,
|
||||
seconds=seconds,
|
||||
)
|
||||
|
||||
|
||||
def get_relative_time_option(
    value: Optional[str],
    input_name: str,
    with_timezone: bool = False,
    now: Optional[datetime] = None,
) -> Optional[datetime]:
    """
    Roughly compatible with community.crypto._time.get_relative_time_option.

    Supported values:
        - None / "" / "none"                 -> None
        - ASN.1 TIME: "YYYYMMDDHHMMSSZ"
        - relative times: "[+-]timespec" with w/d/h/m/s (e.g. "+32w1d2h")
        - "always" / "forever"
        - simple ISO formats (interpreted as UTC)

    Args:
        value: Input string to interpret (see above).
        input_name: Name used in error messages.
        with_timezone: If True, return a tz-aware UTC datetime;
            otherwise (default) a naive datetime.
        now: Reference time for relative specs; defaults to the current
            UTC time.

    Returns:
        datetime (UTC, tz-aware or naive) or None.

    Raises:
        OpenSSLObjectError: If no supported format matches.
    """
    if value is None:
        return None

    value = str(value).strip()
    if not value or value.lower() == "none":
        return None

    # Special cases: always / forever
    if value.lower() == "always":
        dt = datetime(1970, 1, 1, 0, 0, 1, tzinfo=timezone.utc)
        return dt if with_timezone else dt.replace(tzinfo=None)

    if value.lower() == "forever":
        dt = datetime(9999, 12, 31, 23, 59, 59, tzinfo=timezone.utc)
        return dt if with_timezone else dt.replace(tzinfo=None)

    # Relative time specifications
    if value[0] in "+-":
        sign = 1 if value[0] == "+" else -1
        spec = value[1:]
        delta = _parse_relative_spec(spec, input_name)

        if now is None:
            # datetime.utcnow() is deprecated (Python 3.12); now(timezone.utc)
            # yields the equivalent tz-aware UTC timestamp directly.
            now = datetime.now(timezone.utc)

        dt = now + sign * delta

        return dt if with_timezone else dt.replace(tzinfo=None)

    # Absolute time - try ASN.1 TIME first
    try:
        dt = _parse_asn1_time(value, input_name)

        # _parse_asn1_time returns naive (UTC)
        if with_timezone:
            return dt.replace(tzinfo=timezone.utc)
        return dt
    except OpenSSLObjectError:
        # fall back to a few ISO formats below
        pass

    # simple ISO formats: YYYY-MM-DD, YYYY-MM-DDTHH:MM:SS, YYYY-MM-DD HH:MM:SS
    iso_formats = [
        "%Y-%m-%d",
        "%Y-%m-%dT%H:%M:%S",
        "%Y-%m-%d %H:%M:%S",
    ]
    for fmt in iso_formats:
        try:
            dt = datetime.strptime(value, fmt)
            # interpret as UTC
            dt = dt.replace(tzinfo=timezone.utc)
            return dt if with_timezone else dt.replace(tzinfo=None)
        except ValueError:
            continue

    # Nothing matched: report the failure
    raise OpenSSLObjectError(f"Invalid time format for {input_name!r}: {value!r}")
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# CRL-Parsing (Ersatz für community.crypto.module_backends.crl_info.get_crl_info)
|
||||
# ======================================================================
|
||||
|
||||
|
||||
@dataclass
class RevokedCertificateInfo:
    """
    Flat record describing one revoked certificate taken from a CRL.

    Timestamp fields hold ASN.1 TIME strings ("YYYYMMDDHHMMSSZ", UTC);
    each ``*_critical`` flag mirrors the criticality of the CRL entry
    extension it was read from.
    """

    # Serial number of the revoked certificate.
    serial_number: int
    # Revocation timestamp (ASN.1 TIME) or None.
    revocation_date: Optional[str]
    # Lower-cased CRLReason enum name (e.g. "key_compromise"), if present.
    reason: Optional[str] = None
    reason_critical: Optional[bool] = None
    # Invalidity date (ASN.1 TIME) from the InvalidityDate extension, if present.
    invalidity_date: Optional[str] = None
    invalidity_date_critical: Optional[bool] = None
    # Stringified GeneralNames from the CertificateIssuer extension, if present.
    issuer: Optional[List[str]] = None
    issuer_critical: Optional[bool] = None
|
||||
|
||||
|
||||
def _load_crl_from_bytes(data: bytes) -> tuple[x509.CertificateRevocationList, str]:
    """
    Load a CRL from PEM or DER data and return ``(crl_obj, format)``.

    Args:
        data: Raw CRL bytes (PEM or DER encoded).

    Returns:
        tuple: The parsed CRL object and the detected format,
            "pem" or "der".

    Raises:
        OpenSSLObjectError: If *data* is not bytes or cannot be parsed.
    """
    if not isinstance(data, (bytes, bytearray)):
        raise OpenSSLObjectError("CRL data must be bytes")

    # Simple heuristic: a BEGIN header means PEM, otherwise assume DER.
    try:
        if b"-----BEGIN" in data:
            crl = x509.load_pem_x509_crl(data, default_backend())
            return crl, "pem"
        else:
            crl = x509.load_der_x509_crl(data, default_backend())
            return crl, "der"

    except Exception as exc:
        raise OpenSSLObjectError(f"Failed to parse CRL data: {exc}") from exc
|
||||
|
||||
|
||||
def get_crl_info(
    module,
    data: bytes,
    list_revoked_certificates: bool = True,
) -> Dict[str, Any]:
    """
    Collect CRL information, similar to
    community.crypto.module_backends.crl_info.get_crl_info.

    Args:
        module: Ansible-like module object (not used in the body; kept
            for API parity with community.crypto).
        data: Raw CRL bytes (PEM or DER encoded).
        list_revoked_certificates: Whether to include the list of
            revoked certificates in the result.

    Returns:
        dict: Among others:
            - format: "pem" | "der"
            - digest: signature algorithm name (e.g. "sha256"), or None
            - issuer: mapping of issuer attribute names to values
            - last_update / next_update: ASN.1 TIME (UTC) or None
            - revoked_certificates: list of dicts
              (only when list_revoked_certificates=True)

    Raises:
        OpenSSLObjectError: If the CRL cannot be parsed.
    """
    crl, crl_format = _load_crl_from_bytes(data)

    # Signature algorithm
    try:
        digest = crl.signature_hash_algorithm.name
    except Exception:
        digest = None

    # Timestamps
    # cryptography exposes last_update(_utc)/next_update(_utc) depending on version
    last_update_raw = getattr(
        crl,
        "last_update",
        getattr(crl, "last_update_utc", None),
    )
    next_update_raw = getattr(
        crl,
        "next_update",
        getattr(crl, "next_update_utc", None),
    )

    last_update_asn1 = _format_asn1_time(last_update_raw) if last_update_raw else None
    next_update_asn1 = _format_asn1_time(next_update_raw) if next_update_raw else None

    # Issuer as a plain dict (not 1:1 with community.crypto, but useful)
    issuer = {}
    try:
        for attr in crl.issuer:
            # attr.oid._name is internal, but usually "commonName", "organizationName", ...
            key = getattr(attr.oid, "_name", attr.oid.dotted_string)
            issuer[key] = attr.value
    except Exception:
        issuer = {}

    result: Dict[str, Any] = {
        "format": crl_format,
        "digest": digest,
        "issuer": issuer,
        "last_update": last_update_asn1,
        "next_update": next_update_asn1,
    }

    # List of revoked certificates
    if list_revoked_certificates:
        revoked_list: List[Dict[str, Any]] = []
        for r in crl:
            info = RevokedCertificateInfo(
                serial_number=r.serial_number,
                revocation_date=_format_asn1_time(r.revocation_date),
            )

            # Evaluate entry extensions (Reason, InvalidityDate, CertificateIssuer)
            for ext in r.extensions:
                try:
                    if ext.oid == ExtensionOID.CRL_REASON:
                        # ext.value.reason.name is the enum name (e.g. "KEY_COMPROMISE")
                        info.reason = ext.value.reason.name.lower()
                        info.reason_critical = ext.critical
                    elif ext.oid == ExtensionOID.INVALIDITY_DATE:
                        info.invalidity_date = _format_asn1_time(ext.value)
                        info.invalidity_date_critical = ext.critical
                    elif ext.oid == ExtensionOID.CERTIFICATE_ISSUER:
                        # Convert the list of GeneralNames into strings
                        info.issuer = [str(g) for g in ext.value]
                        info.issuer_critical = ext.critical
                except Exception:
                    # Ignore failures in individual extensions; keep evaluating the CRL
                    continue

            revoked_list.append(info.__dict__)

        result["revoked_certificates"] = revoked_list

    return result
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Zertifikats-Parsing (Ersatz für CertificateInfoRetrieval)
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def _split_pem_certificates(data: bytes) -> List[bytes]:
|
||||
"""
|
||||
Splittet ein PEM-Blob mit mehreren CERTIFICATE-Objekten in einzelne PEM-Blöcke.
|
||||
"""
|
||||
begin = b"-----BEGIN CERTIFICATE-----"
|
||||
end = b"-----END CERTIFICATE-----"
|
||||
|
||||
parts: List[bytes] = []
|
||||
while True:
|
||||
start = data.find(begin)
|
||||
if start == -1:
|
||||
break
|
||||
stop = data.find(end, start)
|
||||
if stop == -1:
|
||||
break
|
||||
stop = stop + len(end)
|
||||
block = data[start:stop]
|
||||
parts.append(block)
|
||||
data = data[stop:]
|
||||
return parts
|
||||
|
||||
|
||||
def _load_certificates(content: Union[bytes, bytearray, str]) -> List[x509.Certificate]:
    """
    Load one or more X.509 certificates from PEM or DER data.

    Args:
        content: Certificate data; a str is UTF-8 encoded first.

    Returns:
        list[x509.Certificate]: All certificates found (at least one).

    Raises:
        OpenSSLObjectError: If *content* has an unsupported type, cannot
            be parsed, or contains no certificate.
    """
    if isinstance(content, str):
        content_bytes = content.encode("utf-8")
    elif isinstance(content, (bytes, bytearray)):
        content_bytes = bytes(content)
    else:
        raise OpenSSLObjectError("Certificate content must be bytes or str")

    certs: List[x509.Certificate] = []

    # PEM if a BEGIN header is present, otherwise treat the blob as DER.
    try:
        if b"-----BEGIN CERTIFICATE-----" in content_bytes:
            for block in _split_pem_certificates(content_bytes):
                certs.append(x509.load_pem_x509_certificate(block, default_backend()))
        else:
            certs.append(
                x509.load_der_x509_certificate(content_bytes, default_backend())
            )
    except Exception as exc:
        raise OpenSSLObjectError(f"Failed to parse certificate(s): {exc}") from exc

    if not certs:
        raise OpenSSLObjectError("No certificate found in content")

    return certs
|
||||
|
||||
|
||||
def _name_to_dict_and_ordered(name: x509.Name) -> tuple[Dict[str, str], List[List[str]]]:
    """
    Convert an x509.Name into both a dict and an ordered list.

    Returns:
        tuple:
            - dict: {oid_name: value} — on repeated attributes the last
              occurrence wins (as in x509_certificate_info)
            - ordered: [[oid_name, value], ...] preserving RDN order
    """
    result: Dict[str, str] = {}
    ordered: List[List[str]] = []

    for rdn in name.rdns:
        for attr in rdn:
            # attr.oid._name is internal API; fall back to the dotted OID string.
            key = getattr(attr.oid, "_name", attr.oid.dotted_string)
            value = attr.value
            result[key] = value
            ordered.append([key, value])

    return result, ordered
|
||||
|
||||
|
||||
def _get_subject_alt_name(
    cert: x509.Certificate,
) -> tuple[Optional[List[str]], Optional[bool]]:
    """
    Read subjectAltName and return ``(values, critical)``.

    List elements are the ``str()`` renderings of the GeneralName
    entries (e.g. "DNS:example.com", "IP:1.2.3.4" — exact form depends
    on the cryptography version). Returns ``(None, None)`` when the
    extension is absent.
    """
    try:
        ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
    except x509.ExtensionNotFound:
        return None, None

    values: List[str] = []
    for gn in ext.value:
        # cryptography provides sensible __str__() representations
        values.append(str(gn))

    return values, ext.critical
|
||||
|
||||
|
||||
def _compute_fingerprints(cert: x509.Certificate) -> Dict[str, str]:
    """
    Fingerprints of the whole certificate for common hash algorithms.

    Returns:
        dict: algorithm name -> hex digest with ":"-separated bytes
            (matching community.crypto's formatting). Algorithms the
            backend cannot compute are skipped silently.
    """
    algorithms = [
        ("sha1", hashes.SHA1()),
        ("sha224", hashes.SHA224()),
        ("sha256", hashes.SHA256()),
        ("sha384", hashes.SHA384()),
        ("sha512", hashes.SHA512()),
    ]
    result: Dict[str, str] = {}

    for name, algo in algorithms:
        try:
            fp_bytes = cert.fingerprint(algo)
        except Exception:
            # Unsupported backend/algorithm combination - skip this hash.
            continue
        result[name] = ":".join(f"{b:02x}" for b in fp_bytes)

    return result
|
||||
|
||||
|
||||
class CertificateInfoRetrieval:
    """
    Drop-in replacement for community.crypto's CertificateInfoRetrieval.

    Usage:
        cert_info = CertificateInfoRetrieval(
            module=module,
            content=data,
            valid_at=module.params.get("valid_at"),
        )
        info = cert_info.get_info(prefer_one_fingerprint=False)

    Important keys of the returned dict:
        - not_before (ASN.1 TIME)
        - not_after (ASN.1 TIME)
        - expired (bool)
        - subject, subject_ordered
        - issuer, issuer_ordered
        - subject_alt_name
        - fingerprints
        - valid_at
    """

    def __init__(
        self,
        module=None,
        content: Union[bytes, bytearray, str] = None,
        valid_at: Optional[Dict[str, str]] = None,
    ) -> None:
        """
        :param module: Ansible-module-like object (only stored; may be None)
        :param content: PEM/DER certificate data; required
        :param valid_at: mapping of name -> point-in-time specification
        :raises OpenSSLObjectError: when no content was supplied
        """
        self.module = module
        if content is None:
            raise OpenSSLObjectError("CertificateInfoRetrieval requires 'content'")
        self._certs: List[x509.Certificate] = _load_certificates(content)
        self._valid_at_specs: Dict[str, str] = valid_at or {}

    def _get_primary_cert(self) -> x509.Certificate:
        """
        Return the first certificate of the loaded chain (the leaf).
        """
        return self._certs[0]

    def _compute_valid_at(
        self,
        not_before_raw: Optional[datetime],
        not_after_raw: Optional[datetime],
    ) -> Dict[str, bool]:
        """
        Build the valid_at dict from self._valid_at_specs.

        Semantics: a point in time t is valid when
            not_before <= t <= not_after
        (all times in UTC).

        :return: {name: bool} -- False for invalid/None specifications
        """
        result: Dict[str, bool] = {}
        if not self._valid_at_specs:
            return result

        # Convert the certificate bounds to timezone-aware UTC datetimes.
        nb_utc: Optional[datetime] = None
        na_utc: Optional[datetime] = None

        if not_before_raw is not None:
            # _to_utc_naive returns naive UTC; attach tzinfo here
            nb_utc = _to_utc_naive(not_before_raw).replace(tzinfo=timezone.utc)
        if not_after_raw is not None:
            na_utc = _to_utc_naive(not_after_raw).replace(tzinfo=timezone.utc)

        for name, spec in self._valid_at_specs.items():
            try:
                point = get_relative_time_option(
                    value=spec,
                    input_name=f"valid_at[{name}]",
                    with_timezone=True,
                )
            except OpenSSLObjectError:
                # invalid time specification -> report as not valid
                result[name] = False
                continue

            if point is None:
                # None is interpreted as "nothing to check" -> False
                result[name] = False
                continue

            result[name] = not (
                (nb_utc is not None and point < nb_utc)
                or (na_utc is not None and point > na_utc)
            )

        return result

    def get_info(self, prefer_one_fingerprint: bool = False) -> Dict[str, Any]:
        """
        Return the info dict for the primary certificate.

        :param prefer_one_fingerprint:
            - False (default): 'fingerprints' contains several hashes.
            - True: additionally expose 'fingerprint' /
              'public_key_fingerprint' using a preferred algorithm
              (sha256, falling back to sha1, then sha512, then any).
        """
        cert = self._get_primary_cert()

        # Validity period.
        # NOTE(review): this prefers the (deprecated in cryptography >= 42)
        # naive accessors and only falls back to the *_utc variants; confirm
        # that _format_asn1_time/_to_utc_naive accept aware datetimes before
        # swapping the preference.
        not_before_raw = getattr(
            cert,
            "not_valid_before",
            getattr(cert, "not_valid_before_utc", None),
        )
        not_after_raw = getattr(
            cert,
            "not_valid_after",
            getattr(cert, "not_valid_after_utc", None),
        )

        not_before_asn1 = _format_asn1_time(not_before_raw) if not_before_raw else None
        not_after_asn1 = _format_asn1_time(not_after_raw) if not_after_raw else None

        # datetime.utcnow() is deprecated; build the naive-UTC "now" explicitly.
        now_utc_naive = datetime.now(timezone.utc).replace(tzinfo=None)
        expired = False
        if not_after_raw is not None:
            expired = now_utc_naive > _to_utc_naive(not_after_raw)

        # Subject / issuer in both dict and ordered form.
        subject, subject_ordered = _name_to_dict_and_ordered(cert.subject)
        issuer, issuer_ordered = _name_to_dict_and_ordered(cert.issuer)

        # subjectAltName extension ((None, None) when absent).
        subject_alt_name, subject_alt_name_critical = _get_subject_alt_name(cert)

        # Whole-certificate fingerprints.
        fingerprints = _compute_fingerprints(cert)

        # Public-key fingerprints: hash the DER-encoded SubjectPublicKeyInfo
        # directly.  (The previous implementation tried to parse the SPKI
        # bytes with x509.load_der_x509_certificate() -- the SPKI is not a
        # certificate, so that always raised and this dict was always empty;
        # it also referenced x509.Encoding/x509.PublicFormat, which do not
        # exist -- Encoding/PublicFormat live in hazmat.primitives.serialization.)
        public_key_fingerprints: Dict[str, str] = {}
        try:
            import hashlib

            from cryptography.hazmat.primitives import serialization

            spki_der = cert.public_key().public_bytes(
                encoding=serialization.Encoding.DER,
                format=serialization.PublicFormat.SubjectPublicKeyInfo,
            )
            for algo_name in ("sha1", "sha224", "sha256", "sha384", "sha512"):
                digest = hashlib.new(algo_name, spki_der).digest()
                public_key_fingerprints[algo_name] = ":".join(
                    f"{b:02x}" for b in digest
                )
        except Exception:
            public_key_fingerprints = {}

        # valid_at checks against the certificate's validity window.
        valid_at = self._compute_valid_at(not_before_raw, not_after_raw)

        info: Dict[str, Any] = {
            "not_before": not_before_asn1,
            "not_after": not_after_asn1,
            "expired": expired,
            "subject": subject,
            "subject_ordered": subject_ordered,
            "issuer": issuer,
            "issuer_ordered": issuer_ordered,
            "subject_alt_name": subject_alt_name,
            "subject_alt_name_critical": subject_alt_name_critical,
            "fingerprints": fingerprints,
            "public_key_fingerprints": public_key_fingerprints,
            "valid_at": valid_at,
        }

        # prefer_one_fingerprint: pick a "preferred" algorithm.
        if prefer_one_fingerprint:

            def _pick_fp(src: Dict[str, str]) -> Optional[str]:
                # Preferred order: sha256, sha1, sha512 -- otherwise any.
                if not src:
                    return None
                for algo in ("sha256", "sha1", "sha512"):
                    if algo in src:
                        return src[algo]
                return next(iter(src.values()))

            fp = _pick_fp(fingerprints)
            if fp is not None:
                info["fingerprint"] = fp

            pk_fp = _pick_fp(public_key_fingerprints)
            if pk_fp is not None:
                info["public_key_fingerprint"] = pk_fp

        return info
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,284 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import difflib
|
||||
import itertools
|
||||
import json
|
||||
import textwrap
|
||||
import typing
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class SideBySide:
    """
    Render side-by-side comparisons of two text versions.

    The diff output includes the line numbers of changed lines.
    """

    def __init__(
        self,
        module,
        left: typing.Union[str, dict, typing.List[str]],
        right: typing.Union[str, dict, typing.List[str]],
    ):
        """
        :param module: object exposing a .log(...) method, used for debugging
        :param left: original text (dict, string or list of lines)
        :param right: new text (dict, string or list of lines)
        """
        self.module = module
        self.default_separator = " | "
        self.left = self._normalize_input(left)
        self.right = self._normalize_input(right)

    @staticmethod
    def _normalize_input(
        data: typing.Union[str, dict, typing.List[str]],
    ) -> typing.List[str]:
        """
        Normalize input to a list of lines: dict -> JSON string,
        string -> splitlines(), list -> shallow copy.

        :raises TypeError: for any other input type
        """
        if isinstance(data, dict):
            data = json.dumps(data, indent=2)
        if isinstance(data, str):
            return data.splitlines()
        if isinstance(data, list):
            return data.copy()
        raise TypeError(f"Erwartet dict, str oder List[str], nicht {type(data)}")

    @staticmethod
    def _wrap_and_flatten(lines: typing.List[str], width: int) -> typing.List[str]:
        """
        Wrap each line to at most `width` characters and flatten the
        resulting nested lists.  Empty lines are preserved as [""].
        """
        wrapper = textwrap.TextWrapper(
            width=width,
            break_long_words=False,
            replace_whitespace=False,
        )
        flat: typing.List[str] = []
        for line in lines:
            wrapped = wrapper.wrap(line)
            if not wrapped:
                # wrapper.wrap("") returns [] -- we want to keep [""]
                flat.append("")
            else:
                flat.extend(wrapped)
        return flat

    def side_by_side(
        self,
        left: typing.List[str],
        right: typing.List[str],
        width: int = 78,
        as_string: bool = False,
        separator: typing.Optional[str] = None,
        left_title: typing.Optional[str] = None,
        right_title: typing.Optional[str] = None,
    ) -> typing.Union[str, typing.List[str]]:
        """
        Return lines rendered next to each other:
        [left text][padding][separator][right text]

        :param left: list of lines (already numbered/prepared)
        :param right: list of lines (already numbered/prepared)
        :param width: maximum total width (including the separator)
        :param as_string: True -> return a single newline-joined string
        :param separator: string between left and right (default " | ")
        :param left_title: heading at the very top left (optional)
        :param right_title: heading at the very top right (optional)
        :return: either List[str] or a single string
        """
        sep = separator or self.default_separator
        # Compute how many characters remain for each side:
        side_width = (width - len(sep) - (1 - width % 2)) // 2

        # Wrap/flatten both sides
        left_wrapped = self._wrap_and_flatten(left, side_width)
        right_wrapped = self._wrap_and_flatten(right, side_width)

        # Build pairs; missing lines are padded with the empty string
        pairs = list(itertools.zip_longest(left_wrapped, right_wrapped, fillvalue=""))

        # If headings were given, prepend them (including an underline row)
        if left_title or right_title:
            lt = left_title or ""
            rt = right_title or ""
            underline = "-" * side_width
            header = [(lt, rt), (underline, underline)]
            pairs = header + pairs

        # Assemble each output line
        lines: typing.List[str] = []
        for l_line, r_line in pairs:
            l_text = l_line or ""
            r_text = r_line or ""
            pad = " " * max(0, side_width - len(l_text))
            lines.append(f"{l_text}{pad}{sep}{r_text}")

        return "\n".join(lines) if as_string else lines

    def better_diff(
        self,
        left: typing.Union[str, typing.List[str]],
        right: typing.Union[str, typing.List[str]],
        width: int = 78,
        as_string: bool = True,
        separator: typing.Optional[str] = None,
        left_title: typing.Optional[str] = None,
        right_title: typing.Optional[str] = None,
    ) -> typing.Union[str, typing.List[str]]:
        """
        Produce a side-by-side diff that marks identical/removed/added lines
        and additionally shows the line numbers of both input files.

        difflib prefix semantics:
            "  " -> line present in both files
            "- " -> line only in the left file
            "+ " -> line only in the right file
            "? " -> ignored entirely

        The output has the form:
            <LNo>: <left line>   | <RNo>: <right line>
        or, when one side is missing:
            <LNo>: <left line>   |    -
               -                 | <RNo>: <right line>

        :param left: original text as string or list of lines
        :param right: comparison text as string or list of lines
        :param width: total width including the separator
        :param as_string: True to get a single string back
        :param separator: separator (default: " | ")
        :param left_title: left heading (optional)
        :param right_title: right heading (optional)
        :return: side-by-side list or a single string
        """
        # 1) Normalize the inputs
        l_lines = left.splitlines() if isinstance(left, str) else left.copy()
        r_lines = right.splitlines() if isinstance(right, str) else right.copy()

        # 2) Compute the difference
        differ = difflib.Differ()
        diffed = list(differ.compare(l_lines, r_lines))

        # 3) Line-number counters
        left_lineno = 1
        right_lineno = 1

        left_side: typing.List[str] = []
        right_side: typing.List[str] = []

        # 4) Walk over all diff entries
        for entry in diffed:
            code = entry[:2]  # "  ", "- ", "+ " or "? "
            content = entry[2:]  # the actual text

            if code == "  ":
                # Line exists in both files
                # Left side:  " <LNo>: <text>"
                # Right side: " <RNo>: <text>"
                left_side.append(f"{left_lineno:>4}: {content}")
                right_side.append(f"{right_lineno:>4}: {content}")
                left_lineno += 1
                right_lineno += 1

            elif code == "- ":
                # Only in the left file
                left_side.append(f"{left_lineno:>4}: {content}")
                # Right side gets a "-" placeholder without a number
                right_side.append("   -")
                left_lineno += 1

            elif code == "+ ":
                # Only in the right file
                # Left side shows a "+" without a line number
                left_side.append("   +")
                right_side.append(f"{right_lineno:>4}: {content}")
                right_lineno += 1

            # "? " entries are ignored entirely

        # 5) Hand the numbered lines over to side_by_side()
        return self.side_by_side(
            left=left_side,
            right=right_side,
            width=width,
            as_string=as_string,
            separator=separator,
            left_title=left_title,
            right_title=right_title,
        )

    def diff(
        self,
        width: int = 78,
        as_string: bool = True,
        separator: typing.Optional[str] = None,
        left_title: typing.Optional[str] = None,
        right_title: typing.Optional[str] = None,
    ) -> typing.Union[str, typing.List[str]]:
        """
        Run better_diff() on the left/right inputs loaded in __init__.

        :param width: total width including the separator
        :param as_string: True to get a single string back
        :param separator: separator (default: " | ")
        :param left_title: left heading (optional)
        :param right_title: right heading (optional)

        :return: side-by-side list or a single string
        """
        return self.better_diff(
            left=self.left,
            right=self.right,
            width=width,
            as_string=as_string,
            separator=separator,
            left_title=left_title,
            right_title=right_title,
        )

    def diff_between_files(
        self,
        file_1: typing.Union[str, Path],
        file_2: typing.Union[str, Path],
    ) -> typing.Union[str, typing.List[str]]:
        """
        Read two files and return their side-by-side diff (with line numbers).

        :param file_1: path to the first file
        :param file_2: path to the second file
        :return: list of formatted lines or a single string (as_string=True);
            an empty string when either file does not exist
        """
        f1 = Path(file_1)
        f2 = Path(file_2)

        self.module.log(f"diff_between_files({f1}, {f2})")

        if not f1.is_file() or not f2.is_file():
            self.module.log(f"  Eine oder beide Dateien existieren nicht: {f1}, {f2}")
            # When a file is missing we simply return an empty string.
            return ""

        # Read the files as lists of lines (without trailing "\n")
        old_lines = f1.read_text(encoding="utf-8").splitlines()
        new_lines = f2.read_text(encoding="utf-8").splitlines()

        self.module.log(f"  Gelesen: {len(old_lines)} Zeilen aus {f1}")
        self.module.log(f"  Gelesen: {len(new_lines)} Zeilen aus {f2}")

        diffed = self.better_diff(
            left=old_lines,
            right=new_lines,
            width=140,
            as_string=True,
            separator=self.default_separator,
            left_title=" Original",
            right_title=" Update",
        )

        # Log only an excerpt (e.g. the first 200 characters)
        self.module.log(f"  diffed output (gekürzt):\n{diffed[:200]}...")
        return diffed
|
||||
|
|
@ -1,221 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import grp
|
||||
import os
|
||||
import pwd
|
||||
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.lists import find_in_list
|
||||
|
||||
|
||||
def create_directory(directory, owner=None, group=None, mode=None):
    """
    Create *directory* (including parents) and optionally apply mode and
    ownership.

    :param directory: path of the directory to create
    :param owner: user name or numeric uid; None leaves the owner unchanged
    :param group: group name or numeric gid; None leaves the group unchanged
    :param mode: octal permission string (e.g. "0755"); None leaves it as-is
    :return: True when the path exists as a directory afterwards, else False
    :raises ValueError: when owner/group is neither a known name nor numeric
    """
    try:
        os.makedirs(directory, exist_ok=True)
    except FileExistsError:
        # path exists but is not a directory; fall through so the final
        # isdir() check reports False (previous behaviour)
        pass

    if mode is not None:
        os.chmod(directory, int(mode, base=8))

    # Resolve names to numeric ids; -1 tells os.chown() to leave the
    # corresponding id unchanged.  (The previous implementation used 0 as
    # the "unset" sentinel, which -- because 0 is falsy -- silently skipped
    # chown for uid/gid 0 and whenever only one of owner/group was given.)
    uid = -1
    if owner is not None:
        try:
            uid = pwd.getpwnam(str(owner)).pw_uid
        except KeyError:
            # not a known user name; assume a numeric uid
            uid = int(owner)

    gid = -1
    if group is not None:
        try:
            gid = grp.getgrnam(str(group)).gr_gid
        except KeyError:
            # not a known group name; assume a numeric gid
            gid = int(group)

    if os.path.isdir(directory) and (uid != -1 or gid != -1):
        os.chown(directory, uid, gid)

    return os.path.isdir(directory)
|
||||
|
||||
|
||||
def create_directory_tree(directory_tree, current_state):
    """
    Create a list of directories and apply per-entry mode/ownership.

    :param directory_tree: list of dicts; each entry needs a "source" path
        and may carry a "source_handling" dict with the optional keys
        create / owner / group / mode
    :param current_state: previously gathered state, looked up per source
        path via find_in_list() to reuse the existing owner/group
    :return: None (works purely through filesystem side effects)
    """
    for entry in directory_tree:
        """ """
        source = entry.get("source")
        source_handling = entry.get("source_handling", {})
        force_create = source_handling.get("create", None)
        force_owner = source_handling.get("owner", None)
        force_group = source_handling.get("group", None)
        force_mode = source_handling.get("mode", None)

        # NOTE(review): assumes find_in_list() always returns a dict keyed
        # by `source`; a missing entry would raise here -- confirm callers
        # guarantee that.
        curr = find_in_list(current_state, source)

        current_owner = curr[source].get("owner")
        current_group = curr[source].get("group")

        # create directory -- skipped only when create is explicitly False
        if force_create is not None and not force_create:
            pass
        else:
            try:
                os.makedirs(source, exist_ok=True)
            except FileExistsError:
                pass

        # change mode -- accepts an int (e.g. 755) or an octal string ("0755")
        if os.path.isdir(source) and force_mode is not None:
            if isinstance(force_mode, int):
                mode = int(str(force_mode), base=8)
            if isinstance(force_mode, str):
                mode = int(force_mode, base=8)

            os.chmod(source, mode)

        # change ownership -- falls back to the current owner/group, then root
        if force_owner is not None or force_group is not None:
            """ """
            if os.path.isdir(source):
                """ """
                if force_owner is not None:
                    try:
                        force_owner = pwd.getpwnam(str(force_owner)).pw_uid
                    except KeyError:
                        # not a known user name; assume a numeric uid
                        force_owner = int(force_owner)
                        pass
                elif current_owner is not None:
                    force_owner = current_owner
                else:
                    force_owner = 0

                if force_group is not None:
                    try:
                        force_group = grp.getgrnam(str(force_group)).gr_gid
                    except KeyError:
                        # not a known group name; assume a numeric gid
                        force_group = int(force_group)
                        pass
                elif current_group is not None:
                    force_group = current_group
                else:
                    force_group = 0

                os.chown(source, int(force_owner), int(force_group))
|
||||
|
||||
|
||||
def permstr_to_octal(modestr, umask):
    """
    Convert a Unix permission string (e.g. "rw-r--r--") into a numeric
    mode (e.g. 0o644) and apply *umask* to the result.
    """
    mode = 0
    # Walk the nine permission characters from the least-significant bit
    # (other-execute) upward; any flag character sets its bit.
    for bit, flag in enumerate(modestr[::-1][:9]):
        if flag in "rwxst":
            mode |= 1 << bit

    return mode & ~umask
|
||||
|
||||
|
||||
def current_state(directory):
    """
    Return (uid, gid, mode) of *directory*.

    uid/gid are validated against the passwd/group databases and become
    None when unknown; mode is the last four characters of oct(st_mode),
    e.g. "0755".  A missing directory yields (None, None, None).
    """
    if not os.path.isdir(directory):
        return None, None, None

    stat_info = os.stat(directory)
    owner = None
    group = None
    mode = None

    try:
        # round-trips the uid through passwd to validate it exists
        owner = pwd.getpwuid(stat_info.st_uid).pw_uid
    except KeyError:
        pass

    try:
        # round-trips the gid through the group database
        group = grp.getgrgid(stat_info.st_gid).gr_gid
    except KeyError:
        pass

    try:
        mode = oct(stat_info.st_mode)[-4:]
    except KeyError:
        pass

    return owner, group, mode
|
||||
|
||||
|
||||
def fix_ownership(directory, force_owner=None, force_group=None, force_mode=False):
    """
    Align mode and ownership of *directory* with the requested values.

    :param directory: path to an existing directory (missing paths are a no-op)
    :param force_owner: user name or numeric uid to apply (None: keep current)
    :param force_group: group name or numeric gid to apply (None: keep current)
    :param force_mode: octal mode as int or string (False/None: keep current)
    :return: tuple (changed, error_msg) -- changed is True when the observed
        state differs afterwards; error_msg carries a mode-parsing error
    """
    changed = False
    error_msg = None

    if not os.path.isdir(directory):
        return changed, error_msg

    current_owner, current_group, current_mode = current_state(directory)

    # --- mode ----------------------------------------------------------
    # False and None both mean "do not touch the mode".  The previous
    # implementation crashed with its own default: bool is an int, so
    # int(str(False), base=8) raised ValueError and `mode` was then
    # unbound at os.chmod().
    if force_mode not in (False, None) and force_mode != current_mode:
        mode = None
        try:
            if isinstance(force_mode, str):
                mode = int(force_mode, base=8)
            elif isinstance(force_mode, int):
                mode = int(str(force_mode), base=8)
        except Exception as e:
            error_msg = f" - ERROR '{e}'"
            print(error_msg)

        if mode is not None:
            os.chmod(directory, mode)

    # --- ownership -----------------------------------------------------
    # Explicit parentheses: apply ownership only when something was
    # requested AND it differs from the current state.  (The original
    # condition mixed 'or' and 'and' without parentheses, so the
    # difference check only ever guarded the group branch.)
    if (force_owner is not None or force_group is not None) and (
        force_owner != current_owner or force_group != current_group
    ):
        if force_owner is not None:
            try:
                force_owner = pwd.getpwnam(str(force_owner)).pw_uid
            except KeyError:
                # not a known user name; assume a numeric uid
                force_owner = int(force_owner)
        elif current_owner is not None:
            force_owner = current_owner
        else:
            force_owner = 0

        if force_group is not None:
            try:
                force_group = grp.getgrnam(str(force_group)).gr_gid
            except KeyError:
                # not a known group name; assume a numeric gid
                force_group = int(force_group)
        elif current_group is not None:
            force_group = current_group
        else:
            force_group = 0

        os.chown(directory, int(force_owner), int(force_group))

    # Re-read the state to detect whether anything actually changed.
    _owner, _group, _mode = current_state(directory)

    if (current_owner, current_group, current_mode) != (_owner, _group, _mode):
        changed = True

    return changed, error_msg
|
||||
|
|
@ -1,80 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import dns.exception
|
||||
from dns.resolver import Resolver
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
def dns_lookup(dns_name, timeout=3, dns_resolvers=None):
    """
    Perform a simple DNS lookup and return the results in a dictionary.

    :param dns_name: name to resolve
    :param timeout: per-query timeout in seconds (also used as lifetime)
    :param dns_resolvers: optional list of nameserver addresses to use
        instead of the system resolvers (the former mutable default []
        was replaced by None; semantics are unchanged)
    :return: dict with the keys 'addrs', 'error', 'error_msg', 'name'
    """

    def _result(addrs, error, error_msg):
        # Uniform result shape for every outcome.
        return {
            "addrs": addrs,
            "error": error,
            "error_msg": error_msg,
            "name": dns_name,
        }

    if not dns_name:
        return _result([], True, "No DNS Name for resolving given")

    resolver = Resolver()
    resolver.timeout = float(timeout)
    resolver.lifetime = float(timeout)

    if dns_resolvers:
        resolver.nameservers = dns_resolvers

    # NOTE: the former `except dns.resolver.NameError` clause was removed --
    # dnspython defines no such attribute (NXDOMAIN is its name error), so
    # reaching that clause would itself have raised AttributeError.
    try:
        records = resolver.resolve(dns_name)
        return _result([ii.address for ii in records], False, "")
    except dns.resolver.NXDOMAIN:
        return _result([], True, "No such domain")
    except dns.resolver.NoNameservers as e:
        return _result([], True, repr(e))
    except dns.resolver.Timeout:
        return _result([], True, "Timed out while resolving")
    except dns.exception.DNSException as e:
        # catch-all for every other dnspython error (NoAnswer, ...)
        return _result([], True, f"Unhandled exception ({repr(e)})")
|
||||
|
|
@ -1,493 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import os
|
||||
from typing import Any, List, Sequence, Tuple, Union
|
||||
|
||||
EasyRSAResult = Tuple[int, bool, Union[str, List[str]]]
|
||||
ExecResult = Tuple[int, str, str]
|
||||
|
||||
|
||||
class EasyRSA:
|
||||
"""
|
||||
Thin wrapper around the `easyrsa` CLI to manage a simple PKI lifecycle.
|
||||
|
||||
The class is designed to be used from an Ansible context (``module``),
|
||||
relying on the module to provide:
|
||||
- ``module.params`` for runtime parameters (e.g. ``force``)
|
||||
- ``module.log(...)`` for logging
|
||||
- ``module.get_bin_path("easyrsa", required=True)`` to locate the binary
|
||||
- ``module.run_command([...])`` to execute commands
|
||||
|
||||
Attributes:
|
||||
module: Ansible module-like object providing logging and command execution.
|
||||
state: Internal state placeholder (currently unused).
|
||||
force: Whether to force actions (read from ``module.params['force']``).
|
||||
pki_dir: Path to the PKI directory (commonly ``/etc/easy-rsa/pki``).
|
||||
req_cn_ca: Common name (CN) used when building the CA.
|
||||
req_cn_server: Common name (CN) used for server requests/certificates.
|
||||
ca_keysize: RSA key size for CA key generation.
|
||||
dh_keysize: DH parameter size for DH generation.
|
||||
working_dir: Working directory context (currently not used for chdir).
|
||||
easyrsa: Resolved path to the ``easyrsa`` executable.
|
||||
easyrsa_directory: Base directory used by some file existence checks
|
||||
(defaults to ``/etc/easy-rsa``).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
module: Any,
|
||||
force: bool = False,
|
||||
pki_dir: str = "",
|
||||
req_cn_ca: str = "",
|
||||
req_cn_server: str = "",
|
||||
ca_keysize: int = 4086,
|
||||
dh_keysize: int = 2048,
|
||||
working_dir: str = "",
|
||||
) -> None:
|
||||
"""
|
||||
Create an EasyRSA helper instance.
|
||||
|
||||
Args:
|
||||
module: Ansible module-like object used for logging and running commands.
|
||||
force: Optional force flag (note: the effective value is read from
|
||||
``module.params.get("force", False)``).
|
||||
pki_dir: Path to PKI directory (e.g. ``/etc/easy-rsa/pki``).
|
||||
req_cn_ca: CA request common name (CN) used for ``build-ca``.
|
||||
req_cn_server: Server common name (CN) used for ``gen-req`` and ``sign-req``.
|
||||
ca_keysize: RSA key size for the CA.
|
||||
dh_keysize: DH parameter size.
|
||||
working_dir: Intended working directory for running commands (not applied).
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
self.module = module
|
||||
|
||||
self.module.log(
|
||||
"EasyRSA::__init__("
|
||||
f"force={force}, pki_dir={pki_dir}, "
|
||||
f"req_cn_ca={req_cn_ca}, req_cn_server={req_cn_server}, "
|
||||
f"ca_keysize={ca_keysize}, dh_keysize={dh_keysize}, "
|
||||
f"working_dir={working_dir}"
|
||||
")"
|
||||
)
|
||||
|
||||
self.state = ""
|
||||
|
||||
self.force = module.params.get("force", False)
|
||||
self.pki_dir = pki_dir
|
||||
self.req_cn_ca = req_cn_ca
|
||||
self.req_cn_server = req_cn_server
|
||||
self.ca_keysize = ca_keysize
|
||||
self.dh_keysize = dh_keysize
|
||||
self.working_dir = working_dir
|
||||
|
||||
self.easyrsa = module.get_bin_path("easyrsa", True)
|
||||
|
||||
self.easyrsa_directory = "/etc/easy-rsa"
|
||||
|
||||
# ----------------------------------------------------------------------------------------------
|
||||
# Public API - create
|
||||
def create_pki(self) -> Tuple[int, bool, str]:
|
||||
"""
|
||||
Initialize the PKI directory via ``easyrsa init-pki``.
|
||||
|
||||
The method performs an idempotency check using :meth:`validate_pki` and
|
||||
returns unchanged when the PKI directory already exists.
|
||||
|
||||
Returns:
|
||||
tuple[int, bool, str]: (rc, changed, message)
|
||||
rc: 0 on success, non-zero on failure.
|
||||
changed: True if the PKI was created, False if it already existed.
|
||||
message: Human-readable status message.
|
||||
"""
|
||||
self.module.log(msg="EasyRsa::create_pki()")
|
||||
|
||||
if self.validate_pki():
|
||||
return (0, False, "PKI already created")
|
||||
|
||||
args: List[str] = []
|
||||
args.append(self.easyrsa)
|
||||
args.append("init-pki")
|
||||
|
||||
rc, out, err = self._exec(args)
|
||||
|
||||
if self.validate_pki():
|
||||
return (0, True, "The PKI was successfully created.")
|
||||
else:
|
||||
return (1, True, "An error occurred while creating the PKI.")
|
||||
|
||||
def build_ca(self) -> EasyRSAResult:
|
||||
"""
|
||||
Build a new certificate authority (CA) via ``easyrsa build-ca nopass``.
|
||||
|
||||
Performs an idempotency check using :meth:`validate_ca`. When the CA does not
|
||||
exist, this runs Easy-RSA in batch mode and checks for the existence of:
|
||||
- ``<easyrsa_directory>/pki/ca.crt``
|
||||
- ``<easyrsa_directory>/pki/private/ca.key``
|
||||
|
||||
Returns:
|
||||
tuple[int, bool, Union[str, list[str]]]: (rc, changed, output)
|
||||
rc: 0 on success; 3 if expected files were not created; otherwise
|
||||
the underlying command return code.
|
||||
changed: False if the CA already existed; True if a build was attempted.
|
||||
output: Combined stdout/stderr lines (list[str]) or a success message (str).
|
||||
"""
|
||||
if self.validate_ca():
|
||||
return (0, False, "CA already created")
|
||||
|
||||
args: List[str] = []
|
||||
args.append(self.easyrsa)
|
||||
args.append("--batch")
|
||||
# args.append(f"--pki-dir={self._pki_dir}")
|
||||
args.append(f"--req-cn={self.req_cn_ca}")
|
||||
|
||||
if self.ca_keysize:
|
||||
args.append(f"--keysize={self.ca_keysize}")
|
||||
args.append("build-ca")
|
||||
args.append("nopass")
|
||||
|
||||
rc, out, err = self._exec(args)
|
||||
_output: Union[str, List[str]] = self.result_values(out, err)
|
||||
|
||||
ca_crt_file = os.path.join(self.easyrsa_directory, "pki", "ca.crt")
|
||||
ca_key_file = os.path.join(self.easyrsa_directory, "pki", "private", "ca.key")
|
||||
|
||||
if os.path.exists(ca_crt_file) and os.path.exists(ca_key_file):
|
||||
rc = 0
|
||||
_output = "ca.crt and ca.key were successfully created."
|
||||
else:
|
||||
rc = 3
|
||||
|
||||
return (rc, True, _output)
|
||||
|
||||
def gen_crl(self) -> EasyRSAResult:
|
||||
"""
|
||||
Generate a certificate revocation list (CRL) via ``easyrsa gen-crl``.
|
||||
|
||||
Performs an idempotency check using :meth:`validate_crl` and checks for
|
||||
``<easyrsa_directory>/pki/crl.pem`` after execution.
|
||||
|
||||
Returns:
|
||||
tuple[int, bool, Union[str, list[str]]]: (rc, changed, output)
|
||||
rc: 0 on success; 3 if expected file was not created; otherwise
|
||||
the underlying command return code.
|
||||
changed: False if CRL already existed; True if generation was attempted.
|
||||
output: Combined stdout/stderr lines (list[str]) or a success message (str).
|
||||
"""
|
||||
self.module.log("EasyRSA::gen_crl()")
|
||||
|
||||
if self.validate_crl():
|
||||
return (0, False, "CRL already created")
|
||||
|
||||
args: List[str] = []
|
||||
args.append(self.easyrsa)
|
||||
# args.append(f"--pki-dir={self._pki_dir}")
|
||||
args.append("gen-crl")
|
||||
|
||||
rc, out, err = self._exec(args)
|
||||
|
||||
# self.module.log(f" rc : {rc}")
|
||||
# self.module.log(f" out: {out}")
|
||||
# self.module.log(f" err: {err}")
|
||||
|
||||
_output: Union[str, List[str]] = self.result_values(out, err)
|
||||
|
||||
crl_pem_file = os.path.join(self.easyrsa_directory, "pki", "crl.pem")
|
||||
|
||||
if os.path.exists(crl_pem_file):
|
||||
rc = 0
|
||||
_output = "crl.pem were successfully created."
|
||||
else:
|
||||
rc = 3
|
||||
|
||||
return (rc, True, _output)
|
||||
|
||||
def gen_req(self) -> EasyRSAResult:
    """
    Create a private key and certificate signing request (CSR) with
    ``easyrsa gen-req <req_cn_server> nopass``.

    Idempotency: returns early when :meth:`validate_req` reports an
    existing request. Success is determined by the presence of
    ``<easyrsa_directory>/pki/reqs/<req_cn_server>.req``.

    Returns:
        tuple[int, bool, Union[str, list[str]]]: (rc, changed, output)
            rc: 0 on success; 3 if the expected file was not created.
            changed: False when the request already existed, True otherwise.
            output: Combined stdout/stderr lines or a success message.
    """
    if self.validate_req():
        return (0, False, "keypair and request already created")

    cmd = [self.easyrsa, "--batch"]
    if self.req_cn_ca:
        cmd.append(f"--req-cn={self.req_cn_ca}")
    cmd += ["gen-req", self.req_cn_server, "nopass"]

    rc, out, err = self._exec(cmd)
    result: Union[str, List[str]] = self.result_values(out, err)

    # Trust the artifact on disk, not only the exit code.
    req_file = os.path.join(
        self.easyrsa_directory, "pki", "reqs", f"{self.req_cn_server}.req"
    )
    if os.path.exists(req_file):
        return (0, True, f"{self.req_cn_server}.req were successfully created.")

    return (3, True, result)
|
||||
|
||||
def sign_req(self) -> EasyRSAResult:
    """
    Sign the server request and issue a certificate via
    ``easyrsa sign-req server <req_cn_server>``.

    Performs an idempotency check using :meth:`validate_sign` and checks for
    ``<easyrsa_directory>/pki/issued/<req_cn_server>.crt`` after execution.

    Returns:
        tuple[int, bool, Union[str, list[str]]]: (rc, changed, output)
            rc: 0 on success; 3 if expected file was not created; otherwise
                the underlying command return code.
            changed: False if the certificate already existed; True if
                signing was attempted.
            output: Combined stdout/stderr lines (list[str]) or a success
                message (str).
    """
    if self.validate_sign():
        # Fixed typo in the user-facing message ("alread" -> "already").
        return (0, False, "certificate already signed")

    args: List[str] = [
        self.easyrsa,
        "--batch",
        "sign-req",
        "server",
        self.req_cn_server,
    ]

    rc, out, err = self._exec(args)
    _output: Union[str, List[str]] = self.result_values(out, err)

    crt_file = os.path.join(
        self.easyrsa_directory, "pki", "issued", f"{self.req_cn_server}.crt"
    )

    # The file on disk is the authoritative success indicator.
    if os.path.exists(crt_file):
        rc = 0
        _output = f"{self.req_cn_server}.crt were successfully created."
    else:
        rc = 3

    return (rc, True, _output)
|
||||
|
||||
def gen_dh(self) -> EasyRSAResult:
    """
    Generate Diffie-Hellman parameters with ``easyrsa gen-dh``.

    Idempotency: returns early when :meth:`validate_dh` reports existing DH
    parameters. Success is determined by the presence of
    ``<easyrsa_directory>/pki/dh.pem``.

    Returns:
        tuple[int, bool, Union[str, list[str]]]: (rc, changed, output)
            rc: 0 on success; 3 if the expected file was not created.
            changed: False when DH params already existed, True otherwise.
            output: Combined stdout/stderr lines or a success message.
    """
    if self.validate_dh():
        return (0, False, "DH already created")

    cmd = [self.easyrsa]
    if self.dh_keysize:
        cmd.append(f"--keysize={self.dh_keysize}")
    cmd.append("gen-dh")

    rc, out, err = self._exec(cmd)
    result: Union[str, List[str]] = self.result_values(out, err)

    # Check the generated file rather than relying on the exit code.
    dh_pem = os.path.join(self.easyrsa_directory, "pki", "dh.pem")
    if os.path.exists(dh_pem):
        return (0, True, "dh.pem were successfully created.")

    return (3, True, result)
|
||||
|
||||
# ----------------------------------------------------------------------------------------------
|
||||
# PRIVATE API - validate
|
||||
def validate_pki(self) -> bool:
    """
    Check whether the PKI directory exists.

    Returns:
        bool: True if ``self.pki_dir`` exists on disk, otherwise False.
    """
    self.module.log(msg="EasyRsa::validate_pki()")

    # os.path.exists already yields the boolean we need; no branching required.
    return os.path.exists(self.pki_dir)
|
||||
|
||||
def validate_ca(self) -> bool:
    """
    Check whether the CA certificate and key exist.

    Expected files (relative to ``self.pki_dir``):
      - ``ca.crt``
      - ``private/ca.key``

    Returns:
        bool: True if both CA files exist, otherwise False.
    """
    # Fixed log message: it previously read "validate__ca()".
    self.module.log(msg="EasyRsa::validate_ca()")

    ca_crt_file = os.path.join(self.pki_dir, "ca.crt")
    ca_key_file = os.path.join(self.pki_dir, "private", "ca.key")

    return os.path.exists(ca_crt_file) and os.path.exists(ca_key_file)
|
||||
|
||||
def validate_crl(self) -> bool:
    """
    Check whether the CRL exists.

    Expected file (relative to ``self.pki_dir``): ``crl.pem``.

    Returns:
        bool: True if the CRL exists, otherwise False.
    """
    # Fixed log message: it previously read "validate__crl()".
    self.module.log(msg="EasyRsa::validate_crl()")

    return os.path.exists(os.path.join(self.pki_dir, "crl.pem"))
|
||||
|
||||
def validate_dh(self) -> bool:
    """
    Check whether the DH parameters file exists.

    Expected file (relative to ``self.pki_dir``): ``dh.pem``.

    Returns:
        bool: True if DH params exist, otherwise False.
    """
    # Fixed log message: it previously read "validate__dh()".
    self.module.log(msg="EasyRsa::validate_dh()")

    return os.path.exists(os.path.join(self.pki_dir, "dh.pem"))
|
||||
|
||||
def validate_req(self) -> bool:
    """
    Check whether the server request (CSR) exists.

    Expected file (relative to ``self.pki_dir``):
    ``reqs/<req_cn_server>.req``.

    Returns:
        bool: True if the CSR exists, otherwise False.
    """
    # Fixed log message: it previously read "validate__req()".
    self.module.log(msg="EasyRsa::validate_req()")

    req_file = os.path.join(self.pki_dir, "reqs", f"{self.req_cn_server}.req")
    return os.path.exists(req_file)
|
||||
|
||||
def validate_sign(self) -> bool:
    """
    Check whether the signed server certificate exists.

    Expected file (relative to ``self.pki_dir``):
    ``issued/<req_cn_server>.crt``.

    Returns:
        bool: True if the certificate exists, otherwise False.
    """
    # Fixed log message: it previously read "validate__sign()".
    self.module.log(msg="EasyRsa::validate_sign()")

    crt_file = os.path.join(self.pki_dir, "issued", f"{self.req_cn_server}.crt")
    return os.path.exists(crt_file)
|
||||
|
||||
# ----------------------------------------------------------------------------------------------
|
||||
|
||||
def _exec(self, commands: Sequence[str], check_rc: bool = False) -> ExecResult:
|
||||
"""
|
||||
Execute a command via the underlying Ansible module.
|
||||
|
||||
Args:
|
||||
commands: Command and arguments as a sequence of strings.
|
||||
check_rc: Passed through to ``module.run_command``; when True, the
|
||||
module may raise/fail on non-zero return codes depending on its behavior.
|
||||
|
||||
Returns:
|
||||
tuple[int, str, str]: (rc, stdout, stderr)
|
||||
rc: Process return code.
|
||||
stdout: Captured standard output.
|
||||
stderr: Captured standard error.
|
||||
"""
|
||||
self.module.log(msg=f"_exec(commands={commands}, check_rc={check_rc}")
|
||||
|
||||
rc, out, err = self.module.run_command(commands, check_rc=check_rc)
|
||||
|
||||
if int(rc) != 0:
|
||||
self.module.log(msg=f" rc : '{rc}'")
|
||||
self.module.log(msg=f" out: '{out}'")
|
||||
self.module.log(msg=f" err: '{err}'")
|
||||
|
||||
return rc, out, err
|
||||
|
||||
def result_values(self, out: str, err: str) -> List[str]:
    """
    Merge stdout and stderr into a single list of output lines.

    Args:
        out: Raw stdout string.
        err: Raw stderr string.

    Returns:
        list[str]: stdout lines first, then stderr lines.
    """
    return out.splitlines() + err.splitlines()
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def create_link(source, destination, force=False):
    """
    Create a symbolic link at *destination* pointing to *source*.

    Args:
        source: Path the link should point to.
        destination: Path of the symlink to create.
        force: When True, remove an existing file or (possibly dangling)
            symlink at *destination* before creating the link.

    Raises:
        FileExistsError: If *destination* already exists and *force* is False.
    """
    if force and os.path.lexists(destination):
        # lexists() also detects dangling symlinks, which os.path.exists()
        # would miss. The original removed unconditionally and crashed with
        # FileNotFoundError when the destination did not exist.
        os.remove(destination)
    os.symlink(source, destination)
|
||||
|
||||
|
||||
def remove_file(file_name):
    """
    Delete *file_name* if it exists.

    Returns:
        bool: True when a file was removed, False when nothing existed.
    """
    if not os.path.exists(file_name):
        return False

    os.remove(file_name)
    return True
|
||||
|
||||
|
||||
def chmod(file_name, mode):
    """
    Apply *mode* to *file_name* if the file exists.

    Args:
        file_name: Path of the file to change.
        mode: Octal permission specification. May be a string such as
            ``"0644"`` (parsed base-8, as before) or an ``int`` that is
            already a permission bitmask. The original implementation
            crashed with TypeError for int modes.
    """
    if os.path.exists(file_name) and mode is not None:
        # Strings are octal by convention ("0644"); ints are used verbatim.
        numeric_mode = int(mode, base=8) if isinstance(mode, str) else int(mode)
        os.chmod(file_name, numeric_mode)
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
|
||||
def find_in_list(list, value):
    """
    Return the first dict in *list* that contains *value* as a key.

    Args:
        list: Iterable of dictionaries. (Parameter name kept for backward
            compatibility even though it shadows the builtin.)
        value: Key to look for.

    Returns:
        The first matching dictionary, or None when no entry has the key.
    """
    return next((entry for entry in list if value in entry), None)
|
||||
|
||||
|
||||
def compare_two_lists(list1: list, list2: list, debug=False):
    """
    Compare two lists and report entries of *list2* missing from *list1*.

    Args:
        list1: Baseline list.
        list2: List to compare against the baseline.
        debug: When True, human-readable difference messages are produced.

    Returns:
        tuple: (changed, diff, debug_msg)
            changed: True when *list2* contains entries not in *list1*.
            diff: The entries of *list2* that are missing from *list1*.
            debug_msg: Diagnostic strings (populated only when debug=True
                and differences exist).
    """
    diff = [x for x in list2 if x not in list1]
    changed = bool(diff)

    debug_msg = []
    if debug and changed:
        # Fixed: the original condition was inverted ("if not changed"),
        # so it reported differences exactly when there were none and then
        # indexed an empty diff list.
        debug_msg.append(f"There are {len(diff)} differences:")
        debug_msg.append(f"  {diff[:5]}")

    return changed, diff, debug_msg
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Iterable, Tuple
|
||||
|
||||
ResultEntry = Dict[str, Dict[str, Any]]
|
||||
ResultState = Iterable[ResultEntry]
|
||||
|
||||
ResultsReturn = Tuple[
|
||||
bool, # has_state
|
||||
bool, # has_changed
|
||||
bool, # has_failed
|
||||
Dict[str, Dict[str, Any]], # state
|
||||
Dict[str, Dict[str, Any]], # changed
|
||||
Dict[str, Dict[str, Any]], # failed
|
||||
]
|
||||
|
||||
|
||||
def results(module: Any, result_state: ResultState) -> ResultsReturn:
    """
    Aggregate per-item module results into combined state/changed/failed maps.

    Each element of *result_state* maps an item identifier (e.g. a container
    name) to a dict that may carry ``state``, ``changed`` and/or ``failed``
    keys. Later duplicates of the same identifier overwrite earlier ones
    during merging.

    Example input::

        [
            {"busybox-1": {"state": "container.env written", "changed": True}},
            {"hello-world-1": {"state": "hello-world-1.properties written"}},
            {"nginx-1": {"failed": True, "msg": "..."}},
        ]

    Args:
        module: Ansible-like module object. Currently unused; kept for API
            symmetry and optional debugging/logging.
        result_state: Iterable of per-item result dictionaries.

    Returns:
        (has_state, has_changed, has_failed, state, changed, failed) where
        the three booleans indicate non-empty maps and the three dicts map
        item identifiers to their result dicts, filtered by a truthy
        ``state`` / ``changed`` / ``failed`` key respectively.
    """
    merged: Dict[str, Dict[str, Any]] = {}
    for entry in result_state:
        merged.update(entry)

    def _by_flag(flag: str) -> Dict[str, Dict[str, Any]]:
        # Only dict-shaped values can carry the marker keys.
        return {
            name: data
            for name, data in merged.items()
            if isinstance(data, dict) and data.get(flag)
        }

    state = _by_flag("state")
    changed = _by_flag("changed")
    failed = _by_flag("failed")

    return (bool(state), bool(changed), bool(failed), state, changed, failed)
|
||||
|
|
@ -1,210 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
"""
|
||||
Compatibility helpers for using passlib 1.7.4 with bcrypt 5.x.
|
||||
|
||||
Background
|
||||
----------
|
||||
passlib 1.7.4 performs a bcrypt backend self-test during import that uses a test
|
||||
secret longer than 72 bytes. bcrypt 5.x raises a ValueError for inputs longer
|
||||
than 72 bytes instead of silently truncating. This can abort imports of
|
||||
passlib.apache (and other passlib components) even before user code runs.
|
||||
|
||||
This module applies a targeted runtime patch:
|
||||
- Patch bcrypt.hashpw/checkpw to truncate inputs to 72 bytes (bcrypt's effective
|
||||
input limit) so that passlib's self-tests do not crash.
|
||||
- Patch passlib.handlers.bcrypt.detect_wrap_bug() to handle the ValueError and
|
||||
proceed with the wraparound test.
|
||||
|
||||
The patch restores passlib importability on systems that ship passlib 1.7.4
|
||||
together with bcrypt 5.x.
|
||||
"""
|
||||
|
||||
import importlib.metadata
|
||||
from importlib.metadata import PackageNotFoundError
|
||||
|
||||
|
||||
def _major_version(dist_name: str) -> int | None:
|
||||
"""
|
||||
Return the major version number of an installed distribution.
|
||||
|
||||
The value is derived from ``importlib.metadata.version(dist_name)`` and then
|
||||
parsed as the leading numeric component.
|
||||
|
||||
Args:
|
||||
dist_name: The distribution name as used by importlib metadata
|
||||
(e.g. "passlib", "bcrypt").
|
||||
|
||||
Returns:
|
||||
The major version as an integer, or ``None`` if the distribution is not
|
||||
installed or the version string cannot be interpreted.
|
||||
"""
|
||||
try:
|
||||
v = importlib.metadata.version(dist_name)
|
||||
except PackageNotFoundError:
|
||||
return None
|
||||
|
||||
# Extract the first dot-separated segment and keep digits only
|
||||
# (works for typical versions like "5.0.1", "5rc1", "5.post1", etc.).
|
||||
head = v.split(".", 1)[0]
|
||||
try:
|
||||
return int("".join(ch for ch in head if ch.isdigit()) or head)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def apply_passlib_bcrypt5_compat(module) -> None:
    """
    Apply runtime patches to make passlib 1.7.4 work with bcrypt 5.x.

    What this does
    -------------
    1) Patches ``bcrypt.hashpw`` and ``bcrypt.checkpw`` to truncate any password
       input longer than 72 bytes to 72 bytes. This prevents bcrypt 5.x from
       raising ``ValueError`` when passlib runs its internal self-tests during
       import. The patch is applied only once per Python process.

    2) Patches ``passlib.handlers.bcrypt.detect_wrap_bug`` to tolerate the
       bcrypt 5.x ``ValueError`` during the wraparound self-test and continue
       the test using a 72-byte truncated secret.

    Preconditions
    -------------
    This function is a no-op unless:
      - passlib is installed and its major version is 1, and
      - bcrypt is installed and its major version is >= 5.

    Logging
    -------
    The function uses ``module.log(...)`` for diagnostic messages. The passed
    ``module`` is expected to be an AnsibleModule (or a compatible object).

    Important
    ---------
    This patch does not remove bcrypt's effective 72-byte input limit. bcrypt
    inherently only considers the first 72 bytes of a password. The patch
    merely restores the historical "truncate silently" behavior in bcrypt 5.x
    so that older passlib versions keep working.

    Args:
        module: An object providing ``log(str)``. Typically an instance of
            ``ansible.module_utils.basic.AnsibleModule``.

    Returns:
        None. The patch is applied in-place to the imported modules.
    """
    module.log("apply_passlib_bcrypt5_compat()")

    passlib_major = _major_version("passlib")
    bcrypt_major = _major_version("bcrypt")

    module.log(f" - passlib_major {passlib_major}")
    module.log(f" - bcrypt_major {bcrypt_major}")

    # Without both packages there is nothing to reconcile; with bcrypt < 5
    # the old "silent truncation" behavior is still in place.
    if passlib_major is None or bcrypt_major is None:
        return
    if bcrypt_major < 5:
        return

    # --- Patch 1: bcrypt itself (so passlib self-tests don't crash) ---
    import bcrypt as _bcrypt  # bcrypt package

    # Guard flag ensures the wrappers are installed at most once per process.
    if not getattr(_bcrypt, "_passlib_compat_applied", False):
        _orig_hashpw = _bcrypt.hashpw
        _orig_checkpw = _bcrypt.checkpw

        def hashpw(secret: bytes, salt: bytes) -> bytes:
            """
            Wrapper around bcrypt.hashpw that truncates secrets to 72 bytes.

            Args:
                secret: Password bytes to hash.
                salt: bcrypt salt/config blob.

            Returns:
                The bcrypt hash as bytes.
            """
            if isinstance(secret, bytearray):
                secret = bytes(secret)
            if len(secret) > 72:
                secret = secret[:72]
            return _orig_hashpw(secret, salt)

        def checkpw(secret: bytes, hashed: bytes) -> bool:
            """
            Wrapper around bcrypt.checkpw that truncates secrets to 72 bytes.

            Args:
                secret: Password bytes to verify.
                hashed: Existing bcrypt hash.

            Returns:
                True if the password matches, otherwise False.
            """
            if isinstance(secret, bytearray):
                secret = bytes(secret)
            if len(secret) > 72:
                secret = secret[:72]
            return _orig_checkpw(secret, hashed)

        _bcrypt.hashpw = hashpw  # type: ignore[assignment]
        _bcrypt.checkpw = checkpw  # type: ignore[assignment]
        _bcrypt._passlib_compat_applied = True

        module.log(" - patched bcrypt.hashpw/checkpw for >72 truncation")

    # --- Patch 2: passlib detect_wrap_bug() (handle bcrypt>=5 behavior) ---
    import passlib.handlers.bcrypt as pl_bcrypt  # noqa: WPS433 (runtime patch)

    if getattr(pl_bcrypt, "_bcrypt5_compat_applied", False):
        return

    def detect_wrap_bug_patched(ident: str) -> bool:
        """
        Replacement for passlib.handlers.bcrypt.detect_wrap_bug().

        passlib's original implementation performs a detection routine to test
        for a historical bcrypt "wraparound" bug. The routine uses a test secret
        longer than 72 bytes. With bcrypt 5.x, this can raise ``ValueError``.
        This patched version catches that error, truncates the secret to 72
        bytes, and completes the verification checks.

        Args:
            ident: The bcrypt identifier prefix (e.g. "$2a$", "$2b$", etc.)
                as provided by passlib.

        Returns:
            True if the backend appears to exhibit the wraparound bug,
            otherwise False.

        Raises:
            RuntimeError: If the backend fails the expected self-test checks.
        """
        secret = (b"0123456789" * 26)[:255]

        bug_hash = (
            ident.encode("ascii")
            + b"04$R1lJ2gkNaoPGdafE.H.16.nVyh2niHsGJhayOHLMiXlI45o8/DU.6"
        )
        try:
            if pl_bcrypt.bcrypt.verify(secret, bug_hash):
                return True
        except ValueError:
            # bcrypt >= 5 may raise ValueError explicitly for secrets
            # longer than 72 bytes; fall back to a truncated secret.
            secret = secret[:72]

        correct_hash = (
            ident.encode("ascii")
            + b"04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi"
        )
        if not pl_bcrypt.bcrypt.verify(secret, correct_hash):
            raise RuntimeError(
                f"bcrypt backend failed wraparound self-test for ident={ident!r}"
            )

        return False

    pl_bcrypt.detect_wrap_bug = detect_wrap_bug_patched  # type: ignore[assignment]
    pl_bcrypt._bcrypt5_compat_applied = True

    module.log(" - patched passlib.handlers.bcrypt.detect_wrap_bug")
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import json
|
||||
|
||||
from jinja2 import Template
|
||||
|
||||
# from ansible_collections.bodsch.core.plugins.module_utils.checksum import Checksum
|
||||
|
||||
|
||||
class TemplateHandler:
    """Render Jinja2 templates on behalf of an Ansible-like module."""

    def __init__(self, module):
        # Ansible-like module object; used for logging.
        self.module = module

    def write_template(self, file_name, template, data):
        """
        Render *template* with ``item=data`` and write the result to *file_name*.

        dict payloads are round-tripped through JSON (sort_keys=True) to get
        a deterministic key order; list payloads are joined with ':'.
        """
        if isinstance(data, dict):
            """
            sort data
            """
            # JSON round-trip yields a dict with deterministic key order.
            data = json.dumps(data, sort_keys=True)
            if isinstance(data, str):
                data = json.loads(data)

        if isinstance(data, list):
            data = ":".join(data)

        tm = Template(template, trim_blocks=True, lstrip_blocks=True)
        d = tm.render(item=data)

        with open(file_name, "w") as f:
            f.write(d)

    def write_when_changed(self, tmp_file, data_file, **kwargs):
        """
        Write *tmp_file* over *data_file* only when contents changed.

        NOTE(review): currently a stub — it only logs the call and returns
        None; the checksum comparison is not implemented yet.
        """
        self.module.log(f"write_when_changed(self, {tmp_file}, {data_file}, {kwargs})")

        # checksum = Checksum(self.module)

        return None
|
||||
|
||||
|
||||
# OBSOLETE, BUT STILL SUPPORTED FOR COMPATIBILITY REASONS
|
||||
def write_template(file_name, template, data):
    """
    Render *template* with ``item=data`` and write the result to *file_name*.

    Obsolete module-level variant kept for compatibility; prefer
    ``TemplateHandler.write_template``. dict payloads are round-tripped
    through JSON (sort_keys=True) for deterministic key order; list payloads
    are joined with ':'.
    """
    if isinstance(data, dict):
        """
        sort data
        """
        # JSON round-trip yields a dict with deterministic key order.
        data = json.dumps(data, sort_keys=True)
        if isinstance(data, str):
            data = json.loads(data)

    if isinstance(data, list):
        data = ":".join(data)

    tm = Template(template, trim_blocks=True, lstrip_blocks=True)
    d = tm.render(item=data)

    with open(file_name, "w") as f:
        f.write(d)
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
|
||||
def validate(value, default=None):
    """
    Return *value* when it carries usable content, otherwise *default*.

    Rules:
        - Non-empty str/list/dict values are returned as-is.
        - bool values are returned as bool (checked before int, because
          bool is a subclass of int; previously ``validate(True)``
          returned ``1`` instead of ``True``).
        - Non-zero int values are returned as int.
        - Everything falsy (None, "", [], {}, 0, False) yields *default*.

    Args:
        value: Value to validate.
        default: Fallback returned when *value* has no usable content.
    """
    if value:
        if isinstance(value, (str, list, dict)):
            if len(value) > 0:
                return value

        # bool must be tested before int: isinstance(True, int) is True.
        if isinstance(value, bool):
            return bool(value)

        if isinstance(value, int):
            return int(value)

    return default
|
||||
|
|
@ -1,741 +0,0 @@
|
|||
"""
|
||||
binary_deploy_impl.py
|
||||
|
||||
Idempotent deployment helper for versioned binaries with activation symlinks.
|
||||
|
||||
This module is intended to be used from Ansible modules (and optionally an action plugin)
|
||||
to deploy one or multiple binaries into a versioned installation directory and activate
|
||||
them via symlinks (e.g. /usr/bin/<name> -> <install_dir>/<name>).
|
||||
|
||||
Key features:
|
||||
- Optional copy from a remote staging directory (remote -> remote) with atomic replacement.
|
||||
- Permission and ownership enforcement (mode/owner/group).
|
||||
- Optional Linux file capabilities via getcap/setcap with normalized, idempotent comparison.
|
||||
- Activation detection based on symlink target.
|
||||
|
||||
Public API:
|
||||
- BinaryDeploy.run(): reads AnsibleModule params and returns module JSON via exit_json/fail_json.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import grp
|
||||
import hashlib
|
||||
import os
|
||||
import pwd
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
_CHUNK_SIZE = 1024 * 1024
|
||||
_CAP_ENTRY_RE = re.compile(r"^(cap_[a-z0-9_]+)([=+])([a-z]+)$", re.IGNORECASE)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class BinaryItem:
    """A single deployable binary with optional activation name and capability."""

    # Logical name of the binary inside the versioned install directory.
    name: str
    # Source path (remote staging area) the binary is copied from.
    src: str
    # Name of the activation symlink (e.g. under /usr/bin) — see module docstring.
    link_name: str
    # Optional Linux file capability spec for setcap (e.g. "cap_net_raw+ep"),
    # or None when no capability should be applied.
    capability: Optional[str]
|
||||
|
||||
|
||||
class _PathOps:
|
||||
"""Filesystem helper methods used by the deployment logic."""
|
||||
|
||||
@staticmethod
|
||||
def sha256_file(path: str) -> str:
|
||||
"""
|
||||
Calculate the SHA-256 checksum of a file.
|
||||
|
||||
Args:
|
||||
path: Path to the file.
|
||||
|
||||
Returns:
|
||||
Hex-encoded SHA-256 digest.
|
||||
"""
|
||||
h = hashlib.sha256()
|
||||
with open(path, "rb") as f:
|
||||
for chunk in iter(lambda: f.read(_CHUNK_SIZE), b""):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
@staticmethod
|
||||
def files_equal(src: str, dst: str) -> bool:
|
||||
"""
|
||||
Compare two files for equality by size and SHA-256 checksum.
|
||||
|
||||
This is used to decide whether a copy is required.
|
||||
|
||||
Args:
|
||||
src: Source file path.
|
||||
dst: Destination file path.
|
||||
|
||||
Returns:
|
||||
True if both files exist and are byte-identical, otherwise False.
|
||||
"""
|
||||
if os.path.abspath(src) == os.path.abspath(dst):
|
||||
return True
|
||||
try:
|
||||
if os.path.samefile(src, dst):
|
||||
return True
|
||||
except FileNotFoundError:
|
||||
return False
|
||||
except OSError:
|
||||
# samefile may fail on some filesystems; fall back to hashing
|
||||
pass
|
||||
|
||||
try:
|
||||
s1 = os.stat(src)
|
||||
s2 = os.stat(dst)
|
||||
except FileNotFoundError:
|
||||
return False
|
||||
|
||||
if s1.st_size != s2.st_size:
|
||||
return False
|
||||
|
||||
# Hashing is the expensive path; size match is a cheap early filter.
|
||||
return _PathOps.sha256_file(src) == _PathOps.sha256_file(dst)
|
||||
|
||||
@staticmethod
|
||||
def ensure_dir(path: str) -> bool:
|
||||
"""
|
||||
Ensure a directory exists.
|
||||
|
||||
Args:
|
||||
path: Directory path.
|
||||
|
||||
Returns:
|
||||
True if the directory was created, otherwise False.
|
||||
"""
|
||||
if os.path.isdir(path):
|
||||
return False
|
||||
os.makedirs(path, exist_ok=True)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def safe_rmtree(path: str) -> None:
|
||||
"""
|
||||
Remove a directory tree with a minimal safety guard.
|
||||
|
||||
Args:
|
||||
path: Directory to remove.
|
||||
|
||||
Raises:
|
||||
ValueError: If the path is empty or points to '/'.
|
||||
"""
|
||||
if not path or os.path.abspath(path) in ("/",):
|
||||
raise ValueError(f"Refusing to remove unsafe path: {path}")
|
||||
shutil.rmtree(path)
|
||||
|
||||
@staticmethod
|
||||
def is_symlink_to(link_path: str, target_path: str) -> bool:
|
||||
"""
|
||||
Check whether link_path is a symlink pointing to target_path.
|
||||
|
||||
Args:
|
||||
link_path: Symlink location.
|
||||
target_path: Expected symlink target.
|
||||
|
||||
Returns:
|
||||
True if link_path is a symlink to target_path, otherwise False.
|
||||
"""
|
||||
try:
|
||||
if not os.path.islink(link_path):
|
||||
return False
|
||||
current = os.readlink(link_path)
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
# Normalize relative symlinks to absolute for comparison.
|
||||
if not os.path.isabs(current):
|
||||
current = os.path.abspath(os.path.join(os.path.dirname(link_path), current))
|
||||
|
||||
return os.path.abspath(current) == os.path.abspath(target_path)
|
||||
|
||||
@staticmethod
|
||||
def ensure_symlink(link_path: str, target_path: str) -> bool:
|
||||
"""
|
||||
Ensure link_path is a symlink to target_path.
|
||||
|
||||
Args:
|
||||
link_path: Symlink location.
|
||||
target_path: Symlink target.
|
||||
|
||||
Returns:
|
||||
True if the symlink was created/updated, otherwise False.
|
||||
"""
|
||||
if _PathOps.is_symlink_to(link_path, target_path):
|
||||
return False
|
||||
|
||||
# Replace existing file/link.
|
||||
try:
|
||||
os.lstat(link_path)
|
||||
os.unlink(link_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
os.symlink(target_path, link_path)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def atomic_copy(src: str, dst: str) -> None:
|
||||
"""
|
||||
Copy a file to dst atomically (write to temp file and rename).
|
||||
|
||||
Args:
|
||||
src: Source file path.
|
||||
dst: Destination file path.
|
||||
"""
|
||||
dst_dir = os.path.dirname(dst)
|
||||
_PathOps.ensure_dir(dst_dir)
|
||||
|
||||
fd, tmp_path = tempfile.mkstemp(prefix=".ansible-binary-", dir=dst_dir)
|
||||
os.close(fd)
|
||||
try:
|
||||
shutil.copyfile(src, tmp_path)
|
||||
os.replace(tmp_path, dst)
|
||||
finally:
|
||||
try:
|
||||
os.unlink(tmp_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
|
||||
class _Identity:
|
||||
"""User/group resolution helpers."""
|
||||
|
||||
@staticmethod
|
||||
def resolve_uid(owner: Optional[str]) -> Optional[int]:
|
||||
"""
|
||||
Resolve a user name or uid string to a numeric uid.
|
||||
|
||||
Args:
|
||||
owner: User name or numeric uid as string.
|
||||
|
||||
Returns:
|
||||
Numeric uid, or None if owner is None.
|
||||
"""
|
||||
if owner is None:
|
||||
return None
|
||||
if owner.isdigit():
|
||||
return int(owner)
|
||||
return pwd.getpwnam(owner).pw_uid
|
||||
|
||||
@staticmethod
|
||||
def resolve_gid(group: Optional[str]) -> Optional[int]:
|
||||
"""
|
||||
Resolve a group name or gid string to a numeric gid.
|
||||
|
||||
Args:
|
||||
group: Group name or numeric gid as string.
|
||||
|
||||
Returns:
|
||||
Numeric gid, or None if group is None.
|
||||
"""
|
||||
if group is None:
|
||||
return None
|
||||
if group.isdigit():
|
||||
return int(group)
|
||||
return grp.getgrnam(group).gr_gid
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class _CapsValue:
    """Canonical form of a Linux file-capability string."""

    value: str

    @staticmethod
    def normalize(raw: str) -> "_CapsValue":
        """
        Bring a capability string into canonical form so that setcap-style
        and getcap-style spellings compare equal.

        Examples:
            - "cap_net_raw+ep" -> "cap_net_raw=ep"
            - "cap_net_raw=pe" -> "cap_net_raw=ep"
            - "cap_a+e, cap_b=ip" -> "cap_a=e,cap_b=ip"
        """
        text = (raw or "").strip()
        if not text:
            return _CapsValue("")

        def canonical(entry: str) -> str:
            # Collapse any internal whitespace before matching.
            entry = " ".join(entry.split())
            match = _CAP_ENTRY_RE.match(entry)
            if match is None:
                # Unrecognized shape: keep the trimmed entry untouched.
                return entry
            name, flags = match.group(1), match.group(3)
            # '=' is the canonical operator (what getcap prints); the flag
            # letters are sorted so 'pe' and 'ep' compare equal.
            return f"{name}={''.join(sorted(flags))}"

        normalized = sorted(
            canonical(part.strip()) for part in text.split(",") if part.strip()
        )
        return _CapsValue(",".join(normalized))
|
||||
|
||||
|
||||
class _Caps:
    """
    Linux file capabilities helper with idempotent detection via getcap/setcap.

    The helper normalizes both desired and current values to avoid false positives,
    e.g. comparing 'cap_net_raw+ep' (setcap style) and 'cap_net_raw=ep' (getcap style).
    """

    def __init__(self, module: AnsibleModule) -> None:
        # The module is only used for run_command() and fail_json().
        self._module = module

    def _parse_getcap_output(self, path: str, out: str) -> _CapsValue:
        """
        Parse getcap output for a single path.

        Supported formats:
            - "/path cap_net_raw=ep"
            - "/path = cap_net_raw=ep"
            - "/path cap_net_raw+ep" (rare, but normalize handles it)
        """
        text = (out or "").strip()
        if not text:
            # getcap prints nothing for files without capabilities.
            return _CapsValue("")

        for line in text.splitlines():
            line = line.strip()
            if not line:
                continue

            # Example lines:
            #   /usr/bin/ping = cap_net_raw+ep
            #   /usr/bin/ping cap_net_raw=ep
            # NOTE(review): startswith() would also match a longer sibling path
            # sharing this prefix; harmless for single-file queries — confirm.
            if line.startswith(path):
                _path_len = len(path)
                rest = line[_path_len:].strip()

                # Strip optional leading '=' or split form.
                if rest.startswith("="):
                    rest = rest[1:].strip()

                tokens = rest.split()
                if tokens and tokens[0] == "=":
                    rest = " ".join(tokens[1:]).strip()

                return _CapsValue.normalize(rest)

        # Fallback: if getcap returned a single line but path formatting differs.
        first = text.splitlines()[0].strip()
        tokens = first.split()
        if len(tokens) >= 2:
            if tokens[1] == "=" and len(tokens) >= 3:
                return _CapsValue.normalize(" ".join(tokens[2:]))
            return _CapsValue.normalize(" ".join(tokens[1:]))

        return _CapsValue("")

    def get_current(self, path: str) -> Optional[_CapsValue]:
        """
        Get the current capability set for a file.

        Returns:
            - _CapsValue("") for no capabilities
            - _CapsValue("cap_xxx=ep") for set capabilities
            - None if getcap is missing (cannot do idempotent checks)
        """
        rc, out, err = self._module.run_command(["getcap", path])
        # rc 127 is the shell convention for "command not found".
        if rc == 127:
            return None
        if rc != 0:
            msg = (err or "").strip()
            # No capabilities can be signaled via non-zero return with empty output.
            if msg and "No such file" in msg:
                self._module.fail_json(msg=f"getcap failed: {msg}", path=path)
            return _CapsValue("")
        return self._parse_getcap_output(path, out)

    def ensure(self, path: str, desired: str) -> bool:
        """
        Ensure the desired capability is present on 'path'.

        Args:
            path: File path.
            desired: Capability string (setcap/getcap style), e.g. "cap_net_raw+ep".

        Returns:
            True if a change was applied, otherwise False.

        Raises:
            AnsibleModule.fail_json on errors or if getcap is missing.
        """
        desired_norm = _CapsValue.normalize(desired)
        current = self.get_current(path)

        if current is None:
            # NOTE(review): code below assumes fail_json() never returns.
            self._module.fail_json(
                msg="getcap is required for idempotent capability management",
                hint="Install libcap tools (e.g. Debian/Ubuntu: 'libcap2-bin')",
                path=path,
                desired=desired_norm.value,
            )

        if current.value == desired_norm.value:
            # Already in the desired state: no change.
            return False

        # setcap accepts both '+ep' and '=ep', but we pass canonical '=...'.
        # NOTE(review): the canonical form joins multiple entries with commas,
        # while setcap(8) expects whitespace-separated clauses — confirm
        # behavior for multi-capability inputs.
        rc, out, err = self._module.run_command(["setcap", desired_norm.value, path])
        if rc != 0:
            msg = (err or out or "").strip() or "setcap failed"
            self._module.fail_json(msg=msg, path=path, capability=desired_norm.value)

        # Re-read to confirm the kernel actually applied the requested set.
        verified = self.get_current(path)
        if verified is None or verified.value != desired_norm.value:
            self._module.fail_json(
                msg="capability verification failed after setcap",
                path=path,
                desired=desired_norm.value,
                current=(verified.value if verified else None),
            )

        return True
|
||||
|
||||
|
||||
class BinaryDeploy:
    """
    Deployment engine used by Ansible modules.

    The instance consumes module parameters, plans whether an update is necessary,
    and then applies changes idempotently:
      - copy (optional)
      - permissions and ownership
      - capabilities (optional)
      - activation symlink
    """

    def __init__(self, module: AnsibleModule) -> None:
        self._module = module
        self._module.log("BinaryDeploy::__init__()")
        # Capability helper shares the module for run_command/fail_json.
        self._caps = _Caps(module)

    @staticmethod
    def _parse_mode(mode: Any) -> int:
        """
        Parse a file mode parameter into an int.

        Args:
            mode: Octal mode as string (e.g. "0755") or int.

        Returns:
            Parsed mode as int.
        """
        if isinstance(mode, int):
            # Already numeric: trusted as-is (assumed to be the octal value).
            return mode
        s = str(mode).strip()
        # Strings are always interpreted base-8 ("0755" -> 493).
        return int(s, 8)

    def _resolve_uid_gid(
        self, owner: Optional[str], group: Optional[str]
    ) -> Tuple[Optional[int], Optional[int]]:
        """
        Resolve owner/group into numeric uid/gid.

        Raises:
            ValueError: If the user or group does not exist.
        """
        try:
            return _Identity.resolve_uid(owner), _Identity.resolve_gid(group)
        except KeyError as exc:
            # pwd/grp raise KeyError for unknown names; surface as ValueError.
            raise ValueError(str(exc)) from exc

    def _parse_items(self, raw: List[Dict[str, Any]]) -> List[BinaryItem]:
        """
        Parse module 'items' parameter into BinaryItem objects.

        Each raw item supports:
          - name (required)
          - src (optional, defaults to name)
          - link_name (optional, defaults to name)
          - capability (optional)
        """
        self._module.log(f"BinaryDeploy::_parse_items(raw: {raw})")

        items: List[BinaryItem] = []
        for it in raw:
            name = str(it["name"])
            # 'or name' also covers empty-string values, not only missing keys.
            src = str(it.get("src") or name)
            link_name = str(it.get("link_name") or name)
            cap = it.get("capability")
            items.append(
                BinaryItem(
                    name=name,
                    src=src,
                    link_name=link_name,
                    capability=str(cap) if cap else None,
                )
            )
        return items

    def _plan(
        self,
        *,
        install_dir: str,
        link_dir: str,
        src_dir: Optional[str],
        do_copy: bool,
        items: List[BinaryItem],
        activation_name: str,
        owner: Optional[str],
        group: Optional[str],
        mode: int,
    ) -> Tuple[bool, bool, Dict[str, Dict[str, bool]]]:
        """
        Build an idempotent plan for all items.

        Returns:
            Tuple of:
              - activated: whether the activation symlink points into install_dir
              - needs_update: whether any operation would be required
              - per_item_plan: dict(item.name -> {copy, perms, cap, link})
        """
        self._module.log(
            "BinaryDeploy::_plan("
            f"install_dir: {install_dir}, link_dir: {link_dir}, src_dir: {src_dir}, "
            f"do_copy: {do_copy}, items: {items}, activation_name: {activation_name}, "
            f"owner: {owner}, group: {group}, mode: {mode})"
        )

        # The activation item is matched by name or link_name; falls back to
        # the first item when nothing matches.
        activation = next(
            (
                i
                for i in items
                if i.name == activation_name or i.link_name == activation_name
            ),
            items[0],
        )
        activation_target = os.path.join(install_dir, activation.name)
        activation_link = os.path.join(link_dir, activation.link_name)
        activated = os.path.isfile(activation_target) and _PathOps.is_symlink_to(
            activation_link, activation_target
        )

        try:
            uid, gid = self._resolve_uid_gid(owner, group)
        except ValueError as exc:
            # NOTE(review): relies on fail_json() not returning; otherwise
            # uid/gid below would be unbound.
            self._module.fail_json(msg=str(exc))

        needs_update = False
        per_item: Dict[str, Dict[str, bool]] = {}

        for item in items:
            dst = os.path.join(install_dir, item.name)
            lnk = os.path.join(link_dir, item.link_name)
            src = os.path.join(src_dir, item.src) if (do_copy and src_dir) else None

            item_plan: Dict[str, bool] = {
                "copy": False,
                "perms": False,
                "cap": False,
                "link": False,
            }

            if do_copy:
                if src is None:
                    self._module.fail_json(
                        msg="src_dir is required when copy=true", item=item.name
                    )
                if not os.path.isfile(src):
                    self._module.fail_json(
                        msg="source binary missing on remote host",
                        src=src,
                        item=item.name,
                    )
                if not os.path.exists(dst) or not _PathOps.files_equal(src, dst):
                    item_plan["copy"] = True

            # perms/ownership (if file missing, perms will be set later)
            try:
                st = os.stat(dst)
                # 0o7777 masks permission bits incl. setuid/setgid/sticky.
                if (st.st_mode & 0o7777) != mode:
                    item_plan["perms"] = True
                if uid is not None and st.st_uid != uid:
                    item_plan["perms"] = True
                if gid is not None and st.st_gid != gid:
                    item_plan["perms"] = True
            except FileNotFoundError:
                item_plan["perms"] = True

            if item.capability:
                desired_norm = _CapsValue.normalize(item.capability)

                if not os.path.exists(dst):
                    item_plan["cap"] = True
                else:
                    current = self._caps.get_current(dst)
                    if current is None:
                        # getcap missing -> cannot validate, apply will fail in ensure().
                        item_plan["cap"] = True
                    elif current.value != desired_norm.value:
                        item_plan["cap"] = True

            if not _PathOps.is_symlink_to(lnk, dst):
                item_plan["link"] = True

            if any(item_plan.values()):
                needs_update = True
            per_item[item.name] = item_plan

        return activated, needs_update, per_item

    def run(self) -> None:
        """
        Execute the deployment based on module parameters.

        Module parameters (expected):
            install_dir (str), link_dir (str), src_dir (optional str), copy (bool),
            items (list[dict]), activation_name (optional str),
            owner (optional str), group (optional str), mode (str),
            cleanup_on_failure (bool), check_only (bool).
        """
        self._module.log("BinaryDeploy::run()")

        p = self._module.params

        install_dir: str = p["install_dir"]
        link_dir: str = p["link_dir"]
        src_dir: Optional[str] = p.get("src_dir")
        do_copy: bool = bool(p["copy"])
        cleanup_on_failure: bool = bool(p["cleanup_on_failure"])
        activation_name: str = str(p.get("activation_name") or "")

        owner: Optional[str] = p.get("owner")
        group: Optional[str] = p.get("group")
        mode_int = self._parse_mode(p["mode"])

        items = self._parse_items(p["items"])
        if not items:
            self._module.fail_json(msg="items must not be empty")

        if not activation_name:
            # Default activation target: the first item.
            activation_name = items[0].name

        # Either the explicit parameter or Ansible's --check mode.
        check_only: bool = bool(p["check_only"]) or bool(self._module.check_mode)

        activated, needs_update, plan = self._plan(
            install_dir=install_dir,
            link_dir=link_dir,
            src_dir=src_dir,
            do_copy=do_copy,
            items=items,
            activation_name=activation_name,
            owner=owner,
            group=group,
            mode=mode_int,
        )

        if check_only:
            # Dry run: report the plan without touching the filesystem.
            self._module.exit_json(
                changed=False, activated=activated, needs_update=needs_update, plan=plan
            )

        changed = False
        details: Dict[str, Dict[str, bool]] = {}

        try:
            if _PathOps.ensure_dir(install_dir):
                changed = True

            uid, gid = self._resolve_uid_gid(owner, group)

            for item in items:
                src = os.path.join(src_dir, item.src) if (do_copy and src_dir) else None
                dst = os.path.join(install_dir, item.name)
                lnk = os.path.join(link_dir, item.link_name)

                item_changed: Dict[str, bool] = {
                    "copied": False,
                    "perms": False,
                    "cap": False,
                    "link": False,
                }

                if do_copy:
                    if src is None:
                        self._module.fail_json(
                            msg="src_dir is required when copy=true", item=item.name
                        )
                    if not os.path.exists(dst) or not _PathOps.files_equal(src, dst):
                        _PathOps.atomic_copy(src, dst)
                        item_changed["copied"] = True
                        changed = True

                if not os.path.exists(dst):
                    self._module.fail_json(
                        msg="destination binary missing in install_dir",
                        dst=dst,
                        hint="In controller-local mode this indicates the transfer/copy stage did not create the file.",
                        item=item.name,
                    )

                st = os.stat(dst)

                if (st.st_mode & 0o7777) != mode_int:
                    os.chmod(dst, mode_int)
                    item_changed["perms"] = True
                    changed = True

                if uid is not None or gid is not None:
                    # Keep the current owner/group where no override is given.
                    new_uid = uid if uid is not None else st.st_uid
                    new_gid = gid if gid is not None else st.st_gid
                    if new_uid != st.st_uid or new_gid != st.st_gid:
                        os.chown(dst, new_uid, new_gid)
                        item_changed["perms"] = True
                        changed = True

                if item.capability:
                    if self._caps.ensure(dst, item.capability):
                        item_changed["cap"] = True
                        changed = True

                if _PathOps.ensure_symlink(lnk, dst):
                    item_changed["link"] = True
                    changed = True

                details[item.name] = item_changed

        except Exception as exc:
            # Best-effort rollback: remove the install dir, never mask the
            # original error with a cleanup failure.
            if cleanup_on_failure:
                try:
                    _PathOps.safe_rmtree(install_dir)
                except Exception:
                    pass
            self._module.fail_json(msg=str(exc), exception=repr(exc))

        # Re-evaluate activation state after applying all changes.
        activation = next(
            (
                i
                for i in items
                if i.name == activation_name or i.link_name == activation_name
            ),
            items[0],
        )
        activation_target = os.path.join(install_dir, activation.name)
        activation_link = os.path.join(link_dir, activation.link_name)
        activated = os.path.isfile(activation_target) and _PathOps.is_symlink_to(
            activation_link, activation_target
        )

        # needs_update is reported False here: any planned work was just applied.
        self._module.exit_json(
            changed=changed, activated=activated, needs_update=False, details=details
        )
|
||||
|
|
@ -1,463 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2025, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from typing import Any, Dict, List, Mapping, Optional, Protocol, Sequence, Tuple
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.deb822_repo import (
|
||||
Deb822RepoManager,
|
||||
Deb822RepoSpec,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
# Module documentation rendered by ansible-doc. The body must be valid YAML:
# the 'enabled' description is quoted because it contains a colon-space
# sequence ("Enabled: yes/no"), which would otherwise break YAML parsing.
DOCUMENTATION = r"""
module: apt_sources
version_added: '2.9.0'
author: "Bodo Schulz (@bodsch) <bodo@boone-schulz.de>"

short_description: Manage APT deb822 (.sources) repositories with repo-specific keyrings.
description:
  - Creates/removes deb822 formatted APT repository files in /etc/apt/sources.list.d.
  - Supports importing repo-specific signing keys either via downloading a key file (with optional dearmor/validation)
    or by installing a keyring .deb package (e.g. Sury keyring).
  - Optionally runs apt-get update when changes occur.
options:
  name:
    description: Logical name of the repository (used for defaults like filename).
    type: str
    required: true
  state:
    description: Whether the repository should be present or absent.
    type: str
    choices: [present, absent]
    default: present
  dest:
    description: Full path of the .sources file. If omitted, computed from filename/name.
    type: str
  filename:
    description: Filename under /etc/apt/sources.list.d/ (must end with .sources).
    type: str
  types:
    description: Repository types (deb, deb-src).
    type: list
    elements: str
    default: ["deb"]
  uris:
    description: Base URIs of the repository.
    type: list
    elements: str
    required: true
  suites:
    description: Suites / distributions (e.g. bookworm). If suite ends with '/', Components must be omitted.
    type: list
    elements: str
    required: true
  components:
    description: Components (e.g. main, contrib). Required unless suite is a path ending in '/'.
    type: list
    elements: str
    default: []
  architectures:
    description: Restrict repository to architectures (e.g. amd64).
    type: list
    elements: str
    default: []
  enabled:
    description: "Whether the source is enabled (Enabled: yes/no)."
    type: bool
    default: true
  signed_by:
    description: Absolute path to a keyring file used as Signed-By. If omitted and key.method is download/deb, derived from key config.
    type: str
  key:
    description: Key import configuration.
    type: dict
    suboptions:
      method:
        description: How to manage keys.
        type: str
        choices: [none, download, deb]
        default: none
      url:
        description: URL to download the key (download) or keyring .deb (deb).
        type: str
      dest:
        description: Destination keyring path for method=download.
        type: str
      checksum:
        description: Optional SHA256 checksum of downloaded content (raw download). Enables strict idempotence and integrity checks.
        type: str
      dearmor:
        description: If true and downloaded key is ASCII armored, dearmor via gpg to a binary keyring.
        type: bool
        default: true
      validate:
        description: If true, validate the final key file via gpg --show-keys.
        type: bool
        default: true
      mode:
        description: File mode for key files / deb cache files.
        type: str
        default: "0644"
      deb_cache_path:
        description: Destination path for downloaded .deb when method=deb.
        type: str
      deb_keyring_path:
        description: Explicit keyring path provided by that .deb (if auto-detection is not possible).
        type: str
  update_cache:
    description: Run apt-get update if repo/key changed.
    type: bool
    default: false
"""
|
||||
|
||||
# Usage examples shown by ansible-doc; each task is a complete snippet.
EXAMPLES = r"""
- name: Add Sury repo via keyring deb package (Debian)
  bodsch.core.apt_sources:
    name: debsuryorg
    uris: ["https://packages.sury.org/php/"]
    suites: ["{{ ansible_facts.distribution_release }}"]
    components: ["main"]
    key:
      method: deb
      url: "https://packages.sury.org/debsuryorg-archive-keyring.deb"
      deb_cache_path: "/var/cache/apt/debsuryorg-archive-keyring.deb"
      # optional if auto-detect fails:
      # deb_keyring_path: "/usr/share/keyrings/debsuryorg-archive-keyring.gpg"
    update_cache: true
  become: true

- name: Add CZ.NIC repo via key download (bookworm)
  bodsch.core.apt_sources:
    name: cznic-labs-knot-resolver
    uris: ["https://pkg.labs.nic.cz/knot-resolver"]
    suites: ["bookworm"]
    components: ["main"]
    key:
      method: download
      url: "https://pkg.labs.nic.cz/gpg"
      dest: "/usr/share/keyrings/cznic-labs-pkg.gpg"
      dearmor: true
      validate: true
    update_cache: true
  become: true
"""

# Return-value documentation rendered by ansible-doc.
RETURN = r"""
repo_path:
  description: Path to the managed .sources file.
  returned: always
  type: str
key_path:
  description: Path to the keyring file used as Signed-By (if managed/derived).
  returned: when key method used or signed_by provided
  type: str
changed:
  description: Whether any change was made.
  returned: always
  type: bool
messages:
  description: Informational messages about performed actions.
  returned: always
  type: list
  elements: str
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class AnsibleModuleLike(Protocol):
    """
    Minimal typing surface for the Ansible module used by this helper.

    Covers every member that :class:`AptSources` actually touches on the
    module object: ``params``, ``check_mode``, command execution, logging
    and the ``exit_json``/``fail_json`` terminators (all provided by the
    real :class:`AnsibleModule`).
    """

    params: Mapping[str, Any]
    # Read by AptSources.run(); with supports_check_mode=False this is
    # always False at runtime, but it is part of the consumed surface.
    check_mode: bool

    def get_bin_path(self, arg: str, required: bool = False) -> Optional[str]:
        """
        Return the absolute path to an executable.

        Args:
            arg: Program name to look up in PATH.
            required: If True, the module typically fails when the binary is not found.

        Returns:
            Absolute path to the executable, or None if not found and not required.
        """
        ...

    def run_command(
        self, args: Sequence[str], check_rc: bool = True
    ) -> Tuple[int, str, str]:
        """
        Execute a command on the target host.

        Args:
            args: Argument vector (already split).
            check_rc: If True, non-zero return codes should be treated as errors.

        Returns:
            Tuple ``(rc, stdout, stderr)``.
        """
        ...

    def log(self, msg: str = "", **kwargs: Any) -> None:
        """
        Write a log/debug message via the Ansible module.

        Args:
            msg: Message text.
            **kwargs: Additional structured log fields (module dependent).
        """
        ...

    def exit_json(self, **kwargs: Any) -> None:
        """
        Terminate the module successfully, returning ``kwargs`` as the result.

        Note:
            Never returns; the process exits.
        """
        ...

    def fail_json(self, msg: str, **kwargs: Any) -> None:
        """
        Terminate the module with an error message and optional extra fields.

        Note:
            Never returns; the process exits.
        """
        ...
|
||||
|
||||
|
||||
class AptSources:
    """
    Manage APT deb822 (.sources) repositories with repo-specific keyrings.

    This class is the orchestration layer used by the module entrypoint. It delegates the
    actual file/key handling to :class:`Deb822RepoManager` and is responsible for:
      - computing the target .sources path
      - ensuring/removing repository key material (method=download or method=deb)
      - ensuring/removing the repository file
      - optionally running ``apt-get update`` when changes occur
    """

    module = None

    def __init__(self, module: AnsibleModuleLike):
        """
        Initialize the handler and snapshot module parameters.

        Args:
            module: An AnsibleModule-like object providing ``params``, logging and command execution.
        """
        self.module = module

        self.module.log("AptSources::__init__()")

        self.name = module.params.get("name")
        self.state = module.params.get("state")
        self.destination = module.params.get("dest")
        self.filename = module.params.get("filename")
        self.types = module.params.get("types")
        self.uris = module.params.get("uris")
        self.suites = module.params.get("suites")
        self.components = module.params.get("components")
        self.architectures = module.params.get("architectures")
        self.enabled = module.params.get("enabled")
        self.update_cache = module.params.get("update_cache")
        self.signed_by = module.params.get("signed_by")
        # BUGFIX: 'key' is optional without a default in the argument spec, so
        # params["key"] is None when the user omits it. Guard with an empty dict,
        # otherwise the .get() calls below raise AttributeError on None.
        # (run() already treats a falsy self.keys as {"method": "none"}.)
        self.keys = module.params.get("key") or {}

        self.option_method = self.keys.get("method")
        self.option_url = self.keys.get("url")
        self.option_dest = self.keys.get("dest")
        self.option_checksum = self.keys.get("checksum")
        self.option_dearmor = self.keys.get("dearmor")
        self.option_validate = self.keys.get("validate")
        self.option_mode = self.keys.get("mode")
        self.option_deb_cache_path = self.keys.get("deb_cache_path")
        self.option_deb_keyring_path = self.keys.get("deb_keyring_path")

    def run(self) -> Dict[str, Any]:
        """
        Apply the requested repository state.

        For ``state=present`` the method ensures the signing key (if configured) and then writes the
        deb822 repository file. For ``state=absent`` it removes the repository file and any managed
        key material.

        Returns:
            A result dictionary intended for ``module.exit_json()``, containing:

            - ``changed``: Whether any managed resource changed.
            - ``repo_path``: Path to the managed ``.sources`` file.
            - ``key_path``: Path to the keyring file used for ``Signed-By`` (if any).
            - ``messages``: Informational messages describing performed actions.

        Note:
            When ``state=absent`` this method exits the module early via ``module.exit_json()``.
        """
        self.module.log("AptSources::run()")

        mng = Deb822RepoManager(self.module)

        repo_path = self._ensure_sources_path(
            mng, self.name, self.destination, self.filename
        )

        changed = False
        messages: List[str] = []

        if self.state == "absent":
            # Falsy key config (omitted parameter) degrades to "no key handling".
            key_cfg: Dict[str, Any] = self.keys or {"method": "none"}

            if mng.remove_file(path=repo_path, check_mode=bool(self.module.check_mode)):
                changed = True
                messages.append(f"removed repo file: {repo_path}")

            # remove managed key material as well
            key_res = mng.remove_key(
                key_cfg=key_cfg,
                signed_by=self.signed_by,
                check_mode=bool(self.module.check_mode),
            )
            if key_res.messages:
                messages.extend(list(key_res.messages))
            if key_res.changed:
                changed = True

            # Early exit: absent handling reports directly via exit_json().
            self.module.exit_json(
                changed=changed,
                repo_path=repo_path,
                key_path=(self.signed_by or key_res.key_path),
                messages=messages,
            )

        # present
        key_cfg: Dict[str, Any] = self.keys or {"method": "none"}
        key_res = mng.ensure_key(key_cfg=key_cfg, check_mode=self.module.check_mode)

        if key_res.messages:
            messages.extend(list(key_res.messages))

        if key_res.changed:
            changed = True

        # Explicit signed_by wins over whatever key path the key step produced.
        signed_by: Optional[str] = self.signed_by or key_res.key_path

        spec = Deb822RepoSpec(
            types=self.types,
            uris=self.uris,
            suites=self.suites,
            components=self.components,
            architectures=self.architectures,
            enabled=self.enabled,
            signed_by=signed_by,
        )

        repo_mode = 0o644
        repo_res = mng.ensure_repo_file(
            repo_path=repo_path,
            spec=spec,
            mode=repo_mode,
            check_mode=self.module.check_mode,
        )

        if repo_res.changed:
            changed = True
            messages.append(f"updated repo file: {repo_path}")

        # --------------------------------------------------------------------------
        # Optionally update cache only if something changed
        if self.update_cache and (key_res.changed or repo_res.changed):
            _, out = mng.apt_update(check_mode=self.module.check_mode)
            messages.append("apt-get update executed")
            if out:
                # keep it short to avoid noisy output
                messages.append("apt-get update: ok")

        return dict(
            changed=changed,
            repo_path=repo_path,
            key_path=signed_by,
            messages=messages,
        )

    def _ensure_sources_path(
        self,
        manager: Deb822RepoManager,
        name: str,
        dest: Optional[str],
        filename: Optional[str],
    ) -> str:
        """
        Determine the destination path of the ``.sources`` file.

        If ``dest`` is provided it is returned unchanged. Otherwise a filename is derived from
        ``filename`` or ``name``, validated, and placed under ``/etc/apt/sources.list.d/``.

        Args:
            manager: Repo manager used for validation.
            name: Logical repository name.
            dest: Explicit destination path (optional).
            filename: Filename (optional, must end in ``.sources``).

        Returns:
            The absolute path of the repository file to manage.
        """

        if dest:
            return dest

        fn = filename or f"{name}.sources"
        # validate filename rules and suffix
        manager.validate_filename(fn)
        return f"/etc/apt/sources.list.d/{fn}"
|
||||
|
||||
|
||||
def main() -> None:
    """
    Entrypoint for the Ansible module.

    Parses module arguments, executes the handler and returns the result via ``exit_json``.
    """
    # Argument spec mirrors the DOCUMENTATION block above.
    # NOTE(review): 'key' is optional with no default, so params["key"] may be
    # None at runtime — downstream code must tolerate that.
    args = dict(
        name=dict(type="str", required=True),
        state=dict(type="str", choices=["present", "absent"], default="present"),
        dest=dict(type="str", required=False),
        filename=dict(type="str", required=False),
        types=dict(type="list", elements="str", default=["deb"]),
        uris=dict(type="list", elements="str", required=True),
        suites=dict(type="list", elements="str", required=True),
        components=dict(type="list", elements="str", default=[]),
        architectures=dict(type="list", elements="str", default=[]),
        enabled=dict(type="bool", default=True),
        signed_by=dict(type="str", required=False),
        key=dict(
            type="dict",
            required=False,
            options=dict(
                method=dict(
                    type="str", choices=["none", "download", "deb"], default="none"
                ),
                url=dict(type="str", required=False),
                dest=dict(type="str", required=False),
                checksum=dict(type="str", required=False),
                dearmor=dict(type="bool", default=True),
                validate=dict(type="bool", default=True),
                mode=dict(type="str", default="0644"),
                deb_cache_path=dict(type="str", required=False),
                deb_keyring_path=dict(type="str", required=False),
            ),
        ),
        update_cache=dict(type="bool", default=False),
    )
    # NOTE(review): check mode is not supported, so module.check_mode (read
    # by AptSources) is always False.
    module = AnsibleModule(
        argument_spec=args,
        supports_check_mode=False,
    )

    handler = AptSources(module)
    result = handler.run()

    module.log(msg=f"= result: {result}")

    module.exit_json(**result)


if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,959 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import tarfile
|
||||
import urllib.parse
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import (
|
||||
Any,
|
||||
Dict,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Protocol,
|
||||
Sequence,
|
||||
Tuple,
|
||||
)
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
DOCUMENTATION = r"""
|
||||
---
|
||||
module: aur
|
||||
short_description: Install or remove Arch Linux packages from the AUR
|
||||
version_added: "0.9.0"
|
||||
author:
|
||||
- Bodo Schulz (@bodsch) <bodo@boone-schulz.de>
|
||||
|
||||
description:
|
||||
- Installs packages from the Arch User Repository (AUR) by building them with C(makepkg).
|
||||
- Recommended: install from a Git repository URL (cloned into C($HOME/<name>), then updated via C(git pull)).
|
||||
- Fallback: if C(repository) is omitted, the module queries the AUR RPC API and downloads/extracts the source tarball to build it.
|
||||
- Ensures idempotency by comparing the currently installed package version with the upstream version (prefers C(.SRCINFO),
|
||||
- falls back to parsing C(PKGBUILD)); pkgrel-only updates trigger a rebuild.
|
||||
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Whether the package should be installed or removed.
|
||||
type: str
|
||||
default: present
|
||||
choices: [present, absent]
|
||||
|
||||
name:
|
||||
description:
|
||||
- Package name to manage (pacman package name / AUR package name).
|
||||
type: str
|
||||
required: true
|
||||
|
||||
repository:
|
||||
description:
|
||||
- Git repository URL that contains the PKGBUILD (usually under U(https://aur.archlinux.org)).
|
||||
- If omitted, the module uses the AUR RPC API to download the source tarball.
|
||||
type: str
|
||||
required: false
|
||||
|
||||
extra_args:
|
||||
description:
|
||||
- Additional arguments passed to C(makepkg) (for example C(--skippgpcheck), C(--nocheck)).
|
||||
type: list
|
||||
elements: str
|
||||
required: false
|
||||
version_added: "2.2.4"
|
||||
|
||||
notes:
|
||||
- Check mode is not supported.
|
||||
- The module is expected to run as a non-root build user (e.g. via C(become_user: aur_builder)).
|
||||
- The build user must be able to install packages non-interactively (makepkg/pacman), and to remove
|
||||
- packages this module uses C(sudo pacman -R...) when C(state=absent).
|
||||
- Network access to AUR is required for repository cloning/pulling or tarball download.
|
||||
|
||||
requirements:
|
||||
- pacman
|
||||
- git (when C(repository) is used)
|
||||
- makepkg (base-devel)
|
||||
- sudo (for C(state=absent) removal path)
|
||||
"""
|
||||
|
||||
EXAMPLES = r"""
|
||||
- name: Install package via AUR repository (recommended)
|
||||
become: true
|
||||
become_user: aur_builder
|
||||
bodsch.core.aur:
|
||||
state: present
|
||||
name: icinga2
|
||||
repository: https://aur.archlinux.org/icinga2.git
|
||||
|
||||
- name: Install package via AUR repository with makepkg extra arguments
|
||||
become: true
|
||||
become_user: aur_builder
|
||||
bodsch.core.aur:
|
||||
state: present
|
||||
name: php-pear
|
||||
repository: https://aur.archlinux.org/php-pear.git
|
||||
extra_args:
|
||||
- --skippgpcheck
|
||||
|
||||
- name: Install package via AUR tarball download (repository omitted)
|
||||
become: true
|
||||
become_user: aur_builder
|
||||
bodsch.core.aur:
|
||||
state: present
|
||||
name: yay
|
||||
|
||||
- name: Remove package
|
||||
become: true
|
||||
bodsch.core.aur:
|
||||
state: absent
|
||||
name: yay
|
||||
"""
|
||||
|
||||
RETURN = r"""
|
||||
changed:
|
||||
description:
|
||||
- Whether the module made changes.
|
||||
- C(true) when a package was installed/rebuilt/removed, otherwise C(false).
|
||||
returned: always
|
||||
type: bool
|
||||
|
||||
failed:
|
||||
description:
|
||||
- Indicates whether the module failed.
|
||||
returned: always
|
||||
type: bool
|
||||
|
||||
msg:
|
||||
description:
|
||||
- Human readable status or error message.
|
||||
- For idempotent runs, typically reports that the version is already installed.
|
||||
returned: always
|
||||
type: str
|
||||
sample:
|
||||
- "Package yay successfully installed."
|
||||
- "Package yay successfully removed."
|
||||
- "Version 1.2.3-1 is already installed."
|
||||
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class AnsibleModuleLike(Protocol):
    """Minimal typing surface for the Ansible module used by this helper."""

    # Parsed module parameters, as exposed by AnsibleModule.params.
    params: Mapping[str, Any]

    def get_bin_path(self, arg: str, required: bool = False) -> Optional[str]:
        """
        Return the absolute path to an executable.

        Args:
            arg: Program name to look up in PATH.
            required: If True, the module typically fails when the binary is not found.

        Returns:
            Absolute path to the executable, or None if not found and not required.
        """
        ...

    def run_command(
        self, args: Sequence[str], check_rc: bool = True
    ) -> Tuple[int, str, str]:
        """
        Execute a command on the target host.

        Args:
            args: Argument vector (already split).
            check_rc: If True, non-zero return codes should be treated as errors.

        Returns:
            Tuple ``(rc, stdout, stderr)``.
        """
        ...

    def log(self, msg: str = "", **kwargs: Any) -> None:
        """
        Write a log/debug message via the Ansible module.

        Args:
            msg: Message text.
            **kwargs: Additional structured log fields (module dependent).
        """
        ...
|
||||
|
||||
|
||||
_PACMAN_Q_RE = re.compile(r"^(?P<name>\S+)\s+(?P<ver>\S+)\s*$", re.MULTILINE)
|
||||
_PKGBUILD_PKGVER_RE = re.compile(r"^pkgver=(?P<version>.*)\s*$", re.MULTILINE)
|
||||
_PKGBUILD_EPOCH_RE = re.compile(r"^epoch=(?P<epoch>.*)\s*$", re.MULTILINE)
|
||||
_SRCINFO_PKGVER_RE = re.compile(r"^\s*pkgver\s*=\s*(?P<version>.*)\s*$", re.MULTILINE)
|
||||
_SRCINFO_EPOCH_RE = re.compile(r"^\s*epoch\s*=\s*(?P<epoch>.*)\s*$", re.MULTILINE)
|
||||
_PKGBUILD_PKGREL_RE = re.compile(r"^pkgrel=(?P<pkgrel>.*)\s*$", re.MULTILINE)
|
||||
_SRCINFO_PKGREL_RE = re.compile(r"^\s*pkgrel\s*=\s*(?P<pkgrel>.*)\s*$", re.MULTILINE)
|
||||
|
||||
|
||||
class Aur:
    """
    Implements AUR package installation/removal.

    Notes:
        - The module is expected to run as a non-root user that is allowed to build packages
          via makepkg (e.g. a dedicated 'aur_builder' user).
        - Repository-based installation is recommended. The tarball-based installation path
          exists as a fallback when no repository URL is provided.
    """

    # Class-level placeholder; replaced by the AnsibleModule instance in __init__().
    module = None

    def __init__(self, module: AnsibleModuleLike):
        """
        Initialize helper state from Ansible module parameters.
        """
        self.module = module
        self.module.log("Aur::__init__()")

        self.state: str = module.params.get("state")
        self.name: str = module.params.get("name")
        self.repository: Optional[str] = module.params.get("repository")
        self.extra_args: Optional[List[str]] = module.params.get("extra_args")

        # Cached state for idempotency decisions during this module run.
        self._installed_version: Optional[str] = None
        self._installed_version_full: Optional[str] = None

        self.pacman_binary: Optional[str] = self.module.get_bin_path("pacman", True)
        self.git_binary: Optional[str] = self.module.get_bin_path("git", True)

    def run(self) -> Dict[str, Any]:
        """
        Execute the requested state transition.

        Returns:
            A result dictionary consumable by Ansible's exit_json().
        """
        self.module.log("Aur::run()")

        installed, installed_version = self.package_installed(self.name)

        # Store installed version for use by other code paths (e.g. AUR tarball installs).
        self._installed_version = installed_version
        self._installed_version_full = (
            self._package_installed_full_version(self.name) if installed else None
        )

        if self._installed_version_full:
            self.module.log(
                msg=f" {self.name} full version: {self._installed_version_full}"
            )

        self.module.log(
            msg=f" {self.name} is installed: {installed} / version: {installed_version}"
        )

        if installed and self.state == "absent":
            sudo_binary = self.module.get_bin_path("sudo", True)

            args: List[str] = [
                sudo_binary,
                self.pacman_binary or "pacman",
                "--remove",
                "--cascade",
                "--recursive",
                "--noconfirm",
                self.name,
            ]

            rc, _, err = self._exec(args)

            if rc == 0:
                return dict(
                    changed=True, msg=f"Package {self.name} successfully removed."
                )
            return dict(
                failed=True,
                changed=False,
                msg=f"An error occurred while removing the package {self.name}: {err}",
            )

        if self.state == "present":
            if self.repository:
                rc, out, err, changed = self.install_from_repository(installed_version)

                # rc == 99 is the legacy "already installed / no change" marker.
                if rc == 99:
                    msg = out
                    rc = 0
                else:
                    msg = f"Package {self.name} successfully installed."
            else:
                rc, out, err, changed = self.install_from_aur()
                msg = (
                    out
                    if rc == 0 and out
                    else f"Package {self.name} successfully installed."
                )

            if rc == 0:
                return dict(failed=False, changed=changed, msg=msg)
            return dict(failed=True, msg=err)

        return dict(
            failed=False,
            changed=False,
            msg="It's all right. Keep moving! There is nothing to see!",
        )

    def package_installed(self, package: str) -> Tuple[bool, Optional[str]]:
        """
        Determine whether a package is installed and return its version key (epoch+pkgver, without pkgrel).

        Args:
            package: Pacman package name to check.

        Returns:
            Tuple (installed, version_string)
            - installed: True if pacman reports the package is installed.
            - version_string: comparable version key '<epoch>:<pkgver>' without pkgrel (epoch optional) or None if not installed.
        """
        self.module.log(f"Aur::package_installed(package: {package})")

        args: List[str] = [
            self.pacman_binary or "pacman",
            "--query",
            package,
        ]

        rc, out, _ = self._exec(args, check=False)

        version_string: Optional[str] = None
        if out:
            m = _PACMAN_Q_RE.search(out)
            if m and m.group("name") == package:
                full_version = m.group("ver")
                # pacman prints "<epoch>:<pkgver>-<pkgrel>" (epoch optional).
                version_string = (
                    full_version.rsplit("-", 1)[0]
                    if "-" in full_version
                    else full_version
                )

        return (rc == 0, version_string)

    def _package_installed_full_version(self, package: str) -> Optional[str]:
        """
        Return the full pacman version string for an installed package.

        The returned string includes both epoch and pkgrel if present, matching the output
        format of "pacman -Q":
        - "<epoch>:<pkgver>-<pkgrel>" (epoch optional)

        Args:
            package: Pacman package name to check.

        Returns:
            The full version string or None if the package is not installed.
        """
        self.module.log(f"Aur::_package_installed_full_version(package: {package})")

        args: List[str] = [
            self.pacman_binary or "pacman",
            "--query",
            package,
        ]

        rc, out, _ = self._exec(args, check=False)
        if rc != 0 or not out:
            return None

        m = _PACMAN_Q_RE.search(out)
        if m and m.group("name") == package:
            return m.group("ver")

        return None

    def run_makepkg(self, directory: str) -> Tuple[int, str, str]:
        """
        Run makepkg to build and install a package.

        Args:
            directory: Directory containing the PKGBUILD.

        Returns:
            Tuple (rc, out, err) from the makepkg execution.
        """
        self.module.log(f"Aur::run_makepkg(directory: {directory})")
        self.module.log(f" current dir : {os.getcwd()}")

        if not os.path.exists(directory):
            return (1, "", f"Directory '{directory}' does not exist.")

        makepkg_binary = self.module.get_bin_path("makepkg", required=True) or "makepkg"

        args: List[str] = [
            makepkg_binary,
            "--syncdeps",
            "--install",
            "--noconfirm",
            "--needed",
            "--clean",
        ]

        if self.extra_args:
            args += self.extra_args

        # makepkg must run from the PKGBUILD directory.
        with self._pushd(directory):
            rc, out, err = self._exec(args, check=False)

        return (rc, out, err)

    def install_from_aur(self) -> Tuple[int, str, str, bool]:
        """
        Install a package by downloading its source tarball from AUR.

        Returns:
            Tuple (rc, out, err, changed)
        """
        self.module.log("Aur::install_from_aur()")

        # NOTE: imported lazily; only this fallback path needs a temp build dir.
        import tempfile

        try:
            rpc = self._aur_rpc_info(self.name)
        except Exception as exc:
            return (1, "", f"Failed to query AUR RPC API: {exc}", False)

        if rpc.get("resultcount") != 1:
            return (1, "", f"Package '{self.name}' not found on AUR.", False)

        result = rpc["results"][0]
        url_path = result.get("URLPath")
        if not url_path:
            return (1, "", f"AUR did not return a source URL for '{self.name}'.", False)

        tar_url = f"https://aur.archlinux.org/{url_path}"
        self.module.log(f" tarball url {tar_url}")

        try:
            f = open_url(tar_url)
        except Exception as exc:
            return (1, "", f"Failed to download AUR tarball: {exc}", False)

        try:
            with tempfile.TemporaryDirectory() as tmpdir:
                with tarfile.open(mode="r|*", fileobj=f) as tar:
                    self._safe_extract_stream(tar, tmpdir)

                build_dir = self._find_pkgbuild_dir(tmpdir)
                if not build_dir:
                    return (
                        1,
                        "",
                        "Unable to locate PKGBUILD in extracted source tree.",
                        False,
                    )

                upstream_version = self._read_upstream_version_key(build_dir)
                upstream_full_version = self._read_upstream_full_version(build_dir)

                # Prefer comparing full versions (epoch:pkgver-pkgrel). This ensures pkgrel-only
                # bumps trigger a rebuild, matching pacman's notion of a distinct package version.
                if self._installed_version_full and upstream_full_version:
                    if self._installed_version_full == upstream_full_version:
                        return (
                            0,
                            f"Version {self._installed_version_full} is already installed.",
                            "",
                            False,
                        )
                elif self._installed_version and upstream_version:
                    if self._installed_version == upstream_version:
                        return (
                            0,
                            f"Version {self._installed_version} is already installed.",
                            "",
                            False,
                        )

                rc, out, err = self.run_makepkg(build_dir)
        except Exception as exc:
            return (1, "", f"Failed to extract/build AUR source: {exc}", False)

        return (rc, out, err, rc == 0)

    def install_from_repository(
        self, installed_version: Optional[str]
    ) -> Tuple[int, str, str, bool]:
        """
        Install a package from a Git repository (recommended).

        Args:
            installed_version: Currently installed version key '<epoch>:<pkgver>' without pkgrel (epoch optional) or None.

        Returns:
            Tuple (rc, out, err, changed)

        Special return code:
            - rc == 99 indicates "already installed / no change" (kept for backward compatibility).
        """
        self.module.log(
            f"Aur::install_from_repository(installed_version: {installed_version})"
        )

        base_dir = str(Path.home())
        repo_dir = os.path.join(base_dir, self.name)

        with self._pushd(base_dir):
            if not os.path.exists(repo_dir):
                rc, out, _err = self.git_clone(repository=self.repository or "")
                if rc != 0:
                    return (rc, out, "Unable to run 'git clone'.", False)

        with self._pushd(repo_dir):
            if os.path.exists(".git"):
                rc, out, _err = self.git_pull()
                if rc != 0:
                    return (rc, out, "Unable to run 'git pull'.", False)

        with self._pushd(repo_dir):
            pkgbuild_file = "PKGBUILD"
            if not os.path.exists(pkgbuild_file):
                return (1, "", "Unable to find PKGBUILD.", False)

            upstream_version = self._read_upstream_version_key(os.getcwd())
            upstream_full_version = self._read_upstream_full_version(os.getcwd())

            # Prefer comparing full versions (epoch:pkgver-pkgrel). This ensures pkgrel-only bumps
            # trigger a rebuild even if pkgver stayed constant.
            if self._installed_version_full and upstream_full_version:
                if self._installed_version_full == upstream_full_version:
                    return (
                        99,
                        f"Version {self._installed_version_full} is already installed.",
                        "",
                        False,
                    )
            elif installed_version and upstream_version:
                if installed_version == upstream_version:
                    return (
                        99,
                        f"Version {installed_version} is already installed.",
                        "",
                        False,
                    )

            self.module.log(
                msg=f"upstream version: {upstream_full_version or upstream_version}"
            )

        # run_makepkg() changes into repo_dir itself.
        rc, out, err = self.run_makepkg(repo_dir)

        return (rc, out, err, rc == 0)

    def git_clone(self, repository: str) -> Tuple[int, str, str]:
        """
        Clone the repository into a local directory named after the package.

        Returns:
            Tuple (rc, out, err)
        """
        self.module.log(f"Aur::git_clone(repository: {repository})")

        if not self.git_binary:
            return (1, "", "git not found")

        args: List[str] = [
            self.git_binary,
            "clone",
            repository,
            self.name,
        ]

        rc, out, err = self._exec(args)
        return (rc, out, err)

    def git_pull(self) -> Tuple[int, str, str]:
        """
        Update an existing Git repository.

        Returns:
            Tuple (rc, out, err)
        """
        self.module.log("Aur::git_pull()")

        if not self.git_binary:
            return (1, "", "git not found")

        args: List[str] = [
            self.git_binary,
            "pull",
        ]

        rc, out, err = self._exec(args)
        return (rc, out, err)

    def _exec(self, cmd: Sequence[str], check: bool = False) -> Tuple[int, str, str]:
        """
        Execute a command via Ansible's run_command().

        Args:
            cmd: Argument vector (already split).
            check: If True, fail the module on non-zero return code.

        Returns:
            Tuple (rc, out, err)
        """
        self.module.log(f"Aur::_exec(cmd: {cmd}, check: {check})")

        rc, out, err = self.module.run_command(list(cmd), check_rc=check)

        if rc != 0:
            self.module.log(f"  rc : '{rc}'")
            self.module.log(f"  out: '{out}'")
            self.module.log(f"  err: '{err}'")

        return (rc, out, err)

    # -------------------------------------------------------------------------
    # Internal helpers
    # -------------------------------------------------------------------------

    @contextmanager
    def _pushd(self, directory: str) -> Iterator[None]:
        """
        Temporarily change the current working directory.

        This avoids leaking state across module runs and improves correctness of
        commands like makepkg, git clone, and git pull.
        """
        self.module.log(f"Aur::_pushd(directory: {directory})")

        prev = os.getcwd()
        os.chdir(directory)
        try:
            yield
        finally:
            os.chdir(prev)

    def _aur_rpc_info(self, package: str) -> Dict[str, Any]:
        """
        Query the AUR RPC API for a package.

        Returns:
            Parsed JSON dictionary.
        """
        self.module.log(f"Aur::_aur_rpc_info(package: {package})")

        url = "https://aur.archlinux.org/rpc/?v=5&type=info&arg=" + urllib.parse.quote(
            package
        )
        self.module.log(f" rpc url {url}")

        resp = open_url(url)
        return json.loads(resp.read().decode("utf-8"))

    def _safe_extract_stream(self, tar: tarfile.TarFile, target_dir: str) -> None:
        """
        Safely extract a tar stream into target_dir.

        This prevents path traversal attacks by validating each member's target path
        before extraction.
        """
        self.module.log(
            f"Aur::_safe_extract_stream(tar: {tar}, target_dir: {target_dir})"
        )

        target_real = os.path.realpath(target_dir)
        for member in tar:
            member_path = os.path.realpath(os.path.join(target_dir, member.name))
            if (
                not member_path.startswith(target_real + os.sep)
                and member_path != target_real
            ):
                raise ValueError(f"Blocked tar path traversal attempt: {member.name}")
            tar.extract(member, target_dir)

    def _find_pkgbuild_dir(self, root_dir: str) -> Optional[str]:
        """
        Locate the directory that contains the PKGBUILD file inside root_dir.
        """
        self.module.log(f"Aur::_find_pkgbuild_dir(root_dir: {root_dir})")

        for dirpath, _, filenames in os.walk(root_dir):
            if "PKGBUILD" in filenames:
                return dirpath
        return None

    def _read_pkgbuild_pkgver(self, pkgbuild_path: str) -> str:
        """
        Read pkgver from a PKGBUILD file.

        Note:
            This is a best-effort parse of 'pkgver='. It does not execute PKGBUILD code.
        """
        self.module.log(f"Aur::_read_pkgbuild_pkgver(pkgbuild_path: {pkgbuild_path})")

        try:
            with open(pkgbuild_path, "r", encoding="utf-8") as f:
                data = f.read()
        except OSError as exc:
            self.module.log(msg=f"Unable to read PKGBUILD: {exc}")
            return ""

        m = _PKGBUILD_PKGVER_RE.search(data)
        return self._sanitize_scalar(m.group("version")) if m else ""

    def _read_pkgbuild_pkgrel(self, pkgbuild_path: str) -> str:
        """
        Read pkgrel from a PKGBUILD file.

        Note:
            This is a best-effort parse of 'pkgrel='. It does not execute PKGBUILD code.
        """
        self.module.log(f"Aur::_read_pkgbuild_pkgrel(pkgbuild_path: {pkgbuild_path})")

        try:
            with open(pkgbuild_path, "r", encoding="utf-8") as f:
                data = f.read()
        except OSError as exc:
            self.module.log(msg=f"Unable to read PKGBUILD: {exc}")
            return ""

        m = _PKGBUILD_PKGREL_RE.search(data)
        return self._sanitize_scalar(m.group("pkgrel")) if m else ""

    def _read_pkgbuild_full_version(self, pkgbuild_path: str) -> str:
        """
        Read epoch/pkgver/pkgrel from PKGBUILD and return a comparable full version string.

        The returned format matches pacman's version string without architecture:
        - "<epoch>:<pkgver>-<pkgrel>" (epoch optional)
        """
        self.module.log(
            f"Aur::_read_pkgbuild_full_version(pkgbuild_path: {pkgbuild_path})"
        )

        pkgver = self._read_pkgbuild_pkgver(pkgbuild_path)
        pkgrel = self._read_pkgbuild_pkgrel(pkgbuild_path)
        epoch = self._read_pkgbuild_epoch(pkgbuild_path)

        return self._make_full_version(pkgver=pkgver, pkgrel=pkgrel, epoch=epoch)

    def _read_srcinfo_full_version(self, srcinfo_path: str) -> str:
        """
        Read epoch/pkgver/pkgrel from a .SRCINFO file.
        """
        self.module.log(
            f"Aur::_read_srcinfo_full_version(srcinfo_path: {srcinfo_path})"
        )

        try:
            with open(srcinfo_path, "r", encoding="utf-8") as f:
                data = f.read()
        except OSError:
            return ""

        pkgver_m = _SRCINFO_PKGVER_RE.search(data)
        pkgrel_m = _SRCINFO_PKGREL_RE.search(data)
        epoch_m = _SRCINFO_EPOCH_RE.search(data)

        pkgver = self._sanitize_scalar(pkgver_m.group("version")) if pkgver_m else ""
        pkgrel = self._sanitize_scalar(pkgrel_m.group("pkgrel")) if pkgrel_m else ""
        epoch = self._sanitize_scalar(epoch_m.group("epoch")) if epoch_m else None

        return self._make_full_version(pkgver=pkgver, pkgrel=pkgrel, epoch=epoch)

    def _read_upstream_full_version(self, directory: str) -> str:
        """
        Determine the upstream full version for idempotency decisions.

        The function prefers .SRCINFO (static metadata) and falls back to PKGBUILD parsing.
        If pkgrel cannot be determined, the function may return an epoch/pkgver-only key.
        """
        self.module.log(f"Aur::_read_upstream_full_version(directory: {directory})")

        srcinfo_path = os.path.join(directory, ".SRCINFO")
        if os.path.exists(srcinfo_path):
            v = self._read_srcinfo_full_version(srcinfo_path)
            if v:
                return v

        pkgbuild_path = os.path.join(directory, "PKGBUILD")
        if os.path.exists(pkgbuild_path):
            v = self._read_pkgbuild_full_version(pkgbuild_path)
            if v:
                return v

        return ""

    def _read_pkgbuild_version_key(self, pkgbuild_path: str) -> str:
        """
        Read epoch/pkgver from PKGBUILD and return a comparable version key.
        """
        self.module.log(
            f"Aur::_read_pkgbuild_version_key(pkgbuild_path: {pkgbuild_path})"
        )

        pkgver = self._read_pkgbuild_pkgver(pkgbuild_path)
        epoch = self._read_pkgbuild_epoch(pkgbuild_path)

        return self._make_version_key(pkgver=pkgver, epoch=epoch)

    def _read_srcinfo_version_key(self, srcinfo_path: str) -> str:
        """
        Read epoch/pkgver from a .SRCINFO file.
        """
        self.module.log(f"Aur::_read_srcinfo_version_key(srcinfo_path: {srcinfo_path})")

        try:
            with open(srcinfo_path, "r", encoding="utf-8") as f:
                data = f.read()
        except OSError:
            return ""

        pkgver_m = _SRCINFO_PKGVER_RE.search(data)
        epoch_m = _SRCINFO_EPOCH_RE.search(data)

        pkgver = self._sanitize_scalar(pkgver_m.group("version")) if pkgver_m else ""
        epoch = self._sanitize_scalar(epoch_m.group("epoch")) if epoch_m else None

        return self._make_version_key(pkgver=pkgver, epoch=epoch)

    def _read_pkgbuild_epoch(self, pkgbuild_path: str) -> Optional[str]:
        """
        Read epoch from a PKGBUILD file.
        """
        self.module.log(f"Aur::_read_pkgbuild_epoch(pkgbuild_path: {pkgbuild_path})")

        try:
            with open(pkgbuild_path, "r", encoding="utf-8") as f:
                data = f.read()
        except OSError as exc:
            self.module.log(msg=f"Unable to read PKGBUILD: {exc}")
            return None

        m = _PKGBUILD_EPOCH_RE.search(data)

        return self._sanitize_scalar(m.group("epoch")) if m else None

    def _read_upstream_version_key(self, directory: str) -> str:
        """
        Determine the upstream package version key for idempotency decisions.

        The function prefers .SRCINFO (static metadata) and falls back to PKGBUILD
        parsing if .SRCINFO is missing.
        """
        self.module.log(f"Aur::_read_upstream_version_key(directory: {directory})")

        srcinfo_path = os.path.join(directory, ".SRCINFO")
        if os.path.exists(srcinfo_path):
            v = self._read_srcinfo_version_key(srcinfo_path)
            if v:
                return v

        pkgbuild_path = os.path.join(directory, "PKGBUILD")
        if os.path.exists(pkgbuild_path):
            return self._read_pkgbuild_version_key(pkgbuild_path)

        return ""

    def _sanitize_scalar(self, value: str) -> str:
        """
        Sanitize a scalar value extracted from PKGBUILD/.SRCINFO.

        This removes surrounding quotes and trims whitespace. It is intentionally conservative
        and does not attempt to evaluate shell expansions or PKGBUILD functions.
        """
        self.module.log(f"Aur::_sanitize_scalar(value: {value})")

        v = value.strip()
        if (v.startswith('"') and v.endswith('"')) or (
            v.startswith("'") and v.endswith("'")
        ):
            v = v[1:-1].strip()

        return v

    def _make_version_key(self, pkgver: str, epoch: Optional[str]) -> str:
        """
        Build a comparable version key.

        Pacman formats versions as: '<epoch>:<pkgver>-<pkgrel>' (epoch optional).
        This module compares '<epoch>:<pkgver>' (without pkgrel).
        """
        self.module.log(f"Aur::_make_version_key(pkgver: {pkgver}, epoch: {epoch})")

        pv = pkgver.strip()
        ep = self._sanitize_scalar(epoch) if epoch is not None else ""
        if ep and ep != "0":
            return f"{ep}:{pv}" if pv else f"{ep}:"

        return pv

    def _make_full_version(self, pkgver: str, pkgrel: str, epoch: Optional[str]) -> str:
        """
        Build a comparable full version string.

        The returned format matches pacman's version string:
        - "<epoch>:<pkgver>-<pkgrel>" (epoch optional)

        If pkgrel is empty, the function falls back to an epoch/pkgver-only key.
        """
        self.module.log(
            f"Aur::_make_full_version(pkgver: {pkgver}, pkgrel: {pkgrel}, epoch: {epoch})"
        )

        pv = pkgver.strip()
        pr = pkgrel.strip()
        ep = self._sanitize_scalar(epoch) if epoch is not None else ""

        base = f"{ep}:{pv}" if ep and ep != "0" else pv
        if not pr:
            return base

        return f"{base}-{pr}" if base else ""
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
# ===========================================
|
||||
|
||||
|
||||
def main() -> None:
    """
    Entrypoint for the Ansible module.

    Builds the argument specification, runs the Aur handler and exits
    with its result dictionary.
    """
    args = dict(
        # Explicit type for consistency with the other options.
        state=dict(type="str", default="present", choices=["present", "absent"]),
        repository=dict(type="str", required=False),
        name=dict(type="str", required=True),
        # BUGFIX: declare the list element type. DOCUMENTATION promises
        # 'elements: str'; without it Ansible does not validate/coerce the
        # list items and emits an implicit-type warning.
        extra_args=dict(type="list", elements="str", required=False),
    )
    module = AnsibleModule(
        argument_spec=args,
        supports_check_mode=False,
    )

    aur = Aur(module)
    result = aur.run()

    module.log(msg=f"= result: {result}")

    module.exit_json(**result)


# import module snippets
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2024, Bodo Schulz <bodo@boone-schulz.de>
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
# Ansible documentation fragments below: parsed by ansible-doc, never executed.
DOCUMENTATION = r"""
---
module: check_mode
version_added: 2.5.0
author: "Bodo Schulz (@bodsch) <bodo@boone-schulz.de>"

short_description: Replacement for ansible_check_mode.

description:
  - Replacement for ansible_check_mode.
  - The magic variable `ansible_check_mode` was not defined with the correct value in some cases.

options:
"""

# Usage examples rendered by ansible-doc.
EXAMPLES = r"""
- name: detect ansible check_mode
  bodsch.core.check_mode:
  register: _check_mode

- name: define check_mode
  ansible.builtin.set_fact:
    check_mode: '{{ _check_mode.check_mode }}'
"""

# Return value documentation rendered by ansible-doc.
RETURN = r"""
check_mode:
  description:
    - Status for check_mode.
  type: bool
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class CheckMode(object):
    """
    Report whether Ansible is currently running in check mode.

    Thin wrapper around ``AnsibleModule.check_mode`` so playbooks can
    register the value as a regular fact.
    """

    module = None

    def __init__(self, module):
        """
        Store the AnsibleModule instance to inspect later.
        """
        self.module = module

    def run(self):
        """
        Return a result dict carrying the detected check_mode flag.
        """
        in_check_mode = bool(self.module.check_mode)

        return dict(
            failed=False,
            changed=False,
            check_mode=in_check_mode,
        )
|
||||
|
||||
|
||||
def main():
    """
    Module entry point: detect check mode and report it as a result value.
    """
    module = AnsibleModule(
        argument_spec=dict(),
        supports_check_mode=True,
    )

    result = CheckMode(module).run()

    module.log(msg=f"= result: {result}")

    module.exit_json(**result)


# import module snippets
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,155 +0,0 @@
|
|||
"""
|
||||
deploy_and_activate.py
|
||||
|
||||
Deploy versioned binaries and activate them via symlinks.
|
||||
|
||||
Note:
|
||||
- When you want to deploy binaries that exist on the controller (remote_src=false scenario),
|
||||
use the action plugin of the same name (this collection provides it).
|
||||
- This module itself can only copy from a remote src_dir to install_dir (remote -> remote).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.versioned_deployment import (
|
||||
BinaryDeploy,
|
||||
)
|
||||
|
||||
DOCUMENTATION = r"""
|
||||
---
|
||||
module: deploy_and_activate
|
||||
short_description: Deploy versioned binaries and activate them via symlinks
|
||||
description:
|
||||
- Ensures binaries are present in a versioned install directory and activates them via symlinks.
|
||||
- Supports idempotent remote copy, permissions/ownership, Linux file capabilities, and symlink activation.
|
||||
- For controller-local sources, use the action plugin (same task name) shipped by this collection.
|
||||
options:
|
||||
install_dir:
|
||||
description:
|
||||
- Versioned installation directory (e.g. C(/opt/app/1.2.3)).
|
||||
type: path
|
||||
required: true
|
||||
link_dir:
|
||||
description:
|
||||
- Directory where activation symlinks are created (e.g. C(/usr/bin)).
|
||||
type: path
|
||||
default: /usr/bin
|
||||
src_dir:
|
||||
description:
|
||||
- Remote directory containing extracted binaries (required when C(copy=true)).
|
||||
type: path
|
||||
required: false
|
||||
copy:
|
||||
description:
|
||||
- If true, copy from C(src_dir) to C(install_dir) on the remote host (remote -> remote).
|
||||
- If false, assume binaries already exist in C(install_dir) and only enforce perms/caps/links.
|
||||
type: bool
|
||||
default: true
|
||||
items:
|
||||
description:
|
||||
- List of binaries to deploy.
|
||||
- Each item supports C(name), optional C(src), optional C(link_name), optional C(capability).
|
||||
type: list
|
||||
elements: dict
|
||||
required: true
|
||||
activation_name:
|
||||
description:
|
||||
- Item name or link_name used to determine "activated" status. Defaults to the first item.
|
||||
type: str
|
||||
required: false
|
||||
owner:
|
||||
description:
|
||||
- Owner name or uid for deployed binaries.
|
||||
type: str
|
||||
required: false
|
||||
group:
|
||||
description:
|
||||
- Group name or gid for deployed binaries.
|
||||
type: str
|
||||
required: false
|
||||
mode:
|
||||
description:
|
||||
- File mode (octal string).
|
||||
type: str
|
||||
default: "0755"
|
||||
cleanup_on_failure:
|
||||
description:
|
||||
- Remove install_dir if an error occurs during apply.
|
||||
type: bool
|
||||
default: true
|
||||
check_only:
|
||||
description:
|
||||
- If true, do not change anything; return whether an update would be needed.
|
||||
type: bool
|
||||
default: false
|
||||
author:
|
||||
- "Bodsch Core Collection"
|
||||
"""
|
||||
|
||||
# Usage examples rendered by ansible-doc.
# BUGFIX: the examples previously invoked `bodsch.core.binary_deploy`, but this
# module is documented (DOCUMENTATION: module: deploy_and_activate) under the
# name `deploy_and_activate` — the task names are corrected to match.
EXAMPLES = r"""
- name: Deploy logstream_exporter (remote -> remote copy)
  bodsch.core.deploy_and_activate:
    src_dir: "/tmp/logstream_exporter"
    install_dir: "/opt/logstream_exporter/1.2.3"
    link_dir: "/usr/bin"
    copy: true
    owner: "logstream"
    group: "logstream"
    mode: "0755"
    items:
      - name: "logstream_exporter"
        capability: "cap_net_raw+ep"

- name: Only enforce symlinks/caps when files already exist in install_dir
  bodsch.core.deploy_and_activate:
    install_dir: "/opt/alertmanager/0.27.0"
    link_dir: "/usr/bin"
    copy: false
    items:
      - name: "alertmanager"
      - name: "amtool"
"""
|
||||
|
||||
RETURN = r"""
|
||||
changed:
|
||||
description: Whether anything changed.
|
||||
type: bool
|
||||
activated:
|
||||
description: Whether the activation symlink points to the binary in install_dir.
|
||||
type: bool
|
||||
needs_update:
|
||||
description: In check_only/check_mode, indicates whether changes would be applied.
|
||||
type: bool
|
||||
plan:
|
||||
description: In check_only/check_mode, per-item flags for copy/perms/cap/link.
|
||||
type: dict
|
||||
results:
|
||||
description: In apply mode, per-item change information.
|
||||
type: dict
|
||||
"""
|
||||
|
||||
|
||||
def main() -> None:
    """
    Module entry point: validate parameters and run the versioned deployment.
    """
    spec = dict(
        install_dir=dict(type="path", required=True),
        link_dir=dict(type="path", default="/usr/bin"),
        src_dir=dict(type="path", required=False),
        copy=dict(type="bool", default=True),
        items=dict(type="list", elements="dict", required=True),
        activation_name=dict(type="str", required=False),
        owner=dict(type="str", required=False),
        group=dict(type="str", required=False),
        mode=dict(type="str", default="0755"),
        cleanup_on_failure=dict(type="bool", default=True),
        check_only=dict(type="bool", default=False),
    )

    module = AnsibleModule(
        argument_spec=spec,
        supports_check_mode=True,
    )

    # BinaryDeploy handles exit_json/fail_json itself.
    BinaryDeploy(module).run()


if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
"""
|
||||
binary_deploy_remote.py
|
||||
|
||||
Remote worker module for the binary_deploy action plugin.
|
||||
This module expects that src_dir (when copy=true) is available on the remote host.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.versioned_deployment import (
|
||||
BinaryDeploy,
|
||||
)
|
||||
|
||||
|
||||
def main() -> None:
    """
    Remote worker entry point for the binary_deploy action plugin.

    Expects src_dir (when copy=true) to already exist on the managed host.
    """
    argument_spec = {
        "install_dir": {"type": "path", "required": True},
        "link_dir": {"type": "path", "default": "/usr/bin"},
        "src_dir": {"type": "path", "required": False},
        "copy": {"type": "bool", "default": True},
        "items": {"type": "list", "elements": "dict", "required": True},
        "activation_name": {"type": "str", "required": False},
        "owner": {"type": "str", "required": False},
        "group": {"type": "str", "required": False},
        "mode": {"type": "str", "default": "0755"},
        "cleanup_on_failure": {"type": "bool", "default": True},
        "check_only": {"type": "bool", "default": False},
    }

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    deployer = BinaryDeploy(module)
    deployer.run()


if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,253 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2022, Bodo Schulz <bodo@boone-schulz.de>
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.easyrsa import EasyRSA
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.module_results import results
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
DOCUMENTATION = r"""
|
||||
---
|
||||
module: easyrsa
|
||||
version_added: 1.1.3
|
||||
author: "Bodo Schulz (@bodsch) <bodo@boone-schulz.de>"
|
||||
|
||||
short_description: Manage a Public Key Infrastructure (PKI) using EasyRSA.
|
||||
|
||||
description:
|
||||
- This module allows management of a PKI environment using EasyRSA.
|
||||
- It supports initialization of a PKI directory, creation of a Certificate Authority (CA),
|
||||
generation of certificate signing requests (CSR), signing of certificates, generation of
|
||||
a certificate revocation list (CRL), and generation of Diffie-Hellman (DH) parameters.
|
||||
- It is useful for automating the setup of secure communication infrastructure.
|
||||
|
||||
|
||||
options:
|
||||
pki_dir:
|
||||
description:
|
||||
- Path to the PKI directory where certificates and keys will be stored.
|
||||
required: false
|
||||
type: str
|
||||
|
||||
force:
|
||||
description:
|
||||
- If set to true, the existing PKI directory will be deleted and recreated.
|
||||
required: false
|
||||
type: bool
|
||||
default: false
|
||||
|
||||
req_cn_ca:
|
||||
description:
|
||||
- Common Name (CN) to be used for the CA certificate.
|
||||
required: false
|
||||
type: str
|
||||
|
||||
req_cn_server:
|
||||
description:
|
||||
- Common Name (CN) to be used for the server certificate request.
|
||||
required: false
|
||||
type: str
|
||||
|
||||
ca_keysize:
|
||||
description:
|
||||
- Key size (in bits) for the CA certificate.
|
||||
required: false
|
||||
type: int
|
||||
|
||||
dh_keysize:
|
||||
description:
|
||||
- Key size (in bits) for the Diffie-Hellman parameters.
|
||||
required: false
|
||||
type: int
|
||||
|
||||
working_dir:
|
||||
description:
|
||||
- Directory in which to execute the EasyRSA commands.
|
||||
- If not set, commands will be executed in the current working directory.
|
||||
required: false
|
||||
type: str
|
||||
|
||||
"""
|
||||
|
||||
EXAMPLES = r"""
|
||||
- name: initialize easy-rsa - (this is going to take a long time)
|
||||
bodsch.core.easyrsa:
|
||||
pki_dir: '{{ openvpn_easyrsa.directory }}/pki'
|
||||
req_cn_ca: "{{ openvpn_certificate.req_cn_ca }}"
|
||||
req_cn_server: '{{ openvpn_certificate.req_cn_server }}'
|
||||
ca_keysize: 4096
|
||||
dh_keysize: "{{ openvpn_diffie_hellman_keysize }}"
|
||||
working_dir: '{{ openvpn_easyrsa.directory }}'
|
||||
force: true
|
||||
register: _easyrsa_result
|
||||
"""
|
||||
|
||||
RETURN = r"""
|
||||
changed:
|
||||
description: Indicates whether any changes were made during module execution.
|
||||
type: bool
|
||||
returned: always
|
||||
|
||||
failed:
|
||||
description: Indicates whether the module failed.
|
||||
type: bool
|
||||
returned: always
|
||||
|
||||
state:
|
||||
description: A detailed list of results from each EasyRSA operation.
|
||||
type: list
|
||||
elements: dict
|
||||
returned: always
|
||||
sample:
|
||||
- init-pki:
|
||||
failed: false
|
||||
changed: true
|
||||
msg: The PKI was successfully created.
|
||||
- build-ca:
|
||||
failed: false
|
||||
changed: true
|
||||
msg: ca.crt and ca.key were successfully created.
|
||||
- gen-crl:
|
||||
failed: false
|
||||
changed: true
|
||||
msg: crl.pem was successfully created.
|
||||
- gen-req:
|
||||
failed: false
|
||||
changed: true
|
||||
msg: server.req was successfully created.
|
||||
- sign-req:
|
||||
failed: false
|
||||
changed: true
|
||||
msg: server.crt was successfully created.
|
||||
- gen-dh:
|
||||
failed: false
|
||||
changed: true
|
||||
msg: dh.pem was successfully created.
|
||||
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class EasyRsa(object):
    """
    Orchestrate an EasyRSA PKI setup.

    Reads the module parameters, optionally wipes an existing PKI directory
    (force mode), then runs the EasyRSA steps (init-pki, build-ca, gen-crl,
    gen-req, sign-req, gen-dh) in order via the shared EasyRSA helper.
    """

    # AnsibleModule instance, set in __init__.
    module = None

    def __init__(self, module):
        """
        Collect module parameters and resolve the easyrsa binary.

        :param module: AnsibleModule instance providing params and helpers.
        """
        self.module = module

        self.state = ""

        self.force = module.params.get("force", False)
        self.pki_dir = module.params.get("pki_dir", None)
        self.req_cn_ca = module.params.get("req_cn_ca", None)
        self.req_cn_server = module.params.get("req_cn_server", None)
        self.ca_keysize = module.params.get("ca_keysize", None)
        self.dh_keysize = module.params.get("dh_keysize", None)
        self.working_dir = module.params.get("working_dir", None)

        # second argument True: fail the module if easyrsa is not installed
        self.easyrsa = module.get_bin_path("easyrsa", True)

    def run(self):
        """
        runner

        Execute the EasyRSA steps sequentially, stopping at the first failing
        step, and return a result dict with per-step state information.
        """
        result_state = []

        # all easyrsa invocations are executed relative to working_dir
        if self.working_dir:
            os.chdir(self.working_dir)

        # self.module.log(msg=f"-> pwd : {os.getcwd()}")

        if self.force:
            # force mode: remove the existing PKI so everything is recreated
            # self.module.log(msg="force mode ...")
            # self.module.log(msg=f"remove {self.pki_dir}")

            if os.path.isdir(self.pki_dir):
                shutil.rmtree(self.pki_dir)

        ersa = EasyRSA(
            module=self.module,
            force=self.force,
            pki_dir=self.pki_dir,
            req_cn_ca=self.req_cn_ca,
            req_cn_server=self.req_cn_server,
            ca_keysize=self.ca_keysize,
            dh_keysize=self.dh_keysize,
            working_dir=self.working_dir,
        )

        # ordered pipeline: (step name, callable returning (rc, changed, msg))
        steps = [
            ("init-pki", ersa.create_pki),
            ("build-ca", ersa.build_ca),
            ("gen-crl", ersa.gen_crl),
            ("gen-req", ersa.gen_req),
            ("sign-req", ersa.sign_req),
            ("gen-dh", ersa.gen_dh),
        ]

        for step_name, step_func in steps:
            self.module.log(msg=f" - {step_name}")
            rc, changed, msg = step_func()

            result_state.append(
                {step_name: {"failed": rc != 0, "changed": changed, "msg": msg}}
            )
            # abort the pipeline at the first failing step
            if rc != 0:
                break

        # NOTE(review): several unpacked values (_state, _failed, state, changed)
        # are unused, and the result mixes _changed with failed — confirm this
        # matches the contract of module_results.results().
        _state, _changed, _failed, state, changed, failed = results(
            self.module, result_state
        )

        result = dict(changed=_changed, failed=failed, state=result_state)

        return result

    def list_files(self, startpath):
        """
        Debug helper: log the directory tree below *startpath*.
        """
        for root, dirs, files in os.walk(startpath):
            # indentation depth derived from path depth relative to startpath
            level = root.replace(startpath, "").count(os.sep)
            indent = " " * 4 * (level)
            self.module.log(msg=f"{indent}{os.path.basename(root)}/")
            subindent = " " * 4 * (level + 1)
            for f in files:
                self.module.log(msg=f"{subindent}{f}")
|
||||
|
||||
|
||||
def main():
    """
    Module entry point for the easyrsa wrapper.
    """
    module = AnsibleModule(
        argument_spec=dict(
            pki_dir=dict(required=False, type="str"),
            force=dict(required=False, default=False, type="bool"),
            req_cn_ca=dict(required=False),
            req_cn_server=dict(required=False),
            ca_keysize=dict(required=False, type="int"),
            dh_keysize=dict(required=False, type="int"),
            working_dir=dict(required=False),
        ),
        supports_check_mode=False,
    )

    result = EasyRsa(module).run()

    module.log(msg=f"= result: {result}")

    module.exit_json(**result)


# import module snippets
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,250 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.checksum import Checksum
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.directory import (
|
||||
create_directory,
|
||||
)
|
||||
from ansible_collections.bodsch.core.plugins.module_utils.file import chmod, remove_file
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
DOCUMENTATION = """
|
||||
module: facts
|
||||
version_added: 1.0.10
|
||||
author: "Bodo Schulz (@bodsch) <bodo@boone-schulz.de>"
|
||||
|
||||
short_description: Write Ansible Facts
|
||||
|
||||
description:
|
||||
- Write Ansible Facts
|
||||
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Whether to create (C(present)), or remove (C(absent)) a fact.
|
||||
required: false
|
||||
name:
|
||||
description:
|
||||
- The name of the fact.
|
||||
type: str
|
||||
required: true
|
||||
facts:
|
||||
description:
|
||||
- A dictionary with information to be written in the facts.
|
||||
type: dict
|
||||
required: true
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
- name: create custom facts
|
||||
bodsch.core.facts:
|
||||
state: present
|
||||
name: icinga2
|
||||
facts:
|
||||
version: "2.10"
|
||||
salt: fgmklsdfnjyxnvjksdfbkuser
|
||||
user: icinga2
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
msg:
|
||||
description: Module information
|
||||
type: str
|
||||
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
TPL_FACT = """#!/usr/bin/env bash
|
||||
# generated by ansible
|
||||
cat <<EOF
|
||||
{{ item | tojson(indent=2) }}
|
||||
EOF
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class AnsibleFacts(object):
    """
    Main Class

    Write custom Ansible facts below /etc/ansible/facts.d and keep a cache
    (json + checksum) below /var/cache/ansible/<name> for change detection.
    """

    # AnsibleModule instance, set in __init__.
    module = None

    def __init__(self, module):
        """
        Initialize all needed Variables

        :param module: AnsibleModule instance providing params and helpers.
        """
        self.module = module

        # NOTE(review): 'verbose' is read here but not declared in main()'s
        # argument_spec — confirm whether it is dead or missing from the spec.
        self.verbose = module.params.get("verbose")
        self.state = module.params.get("state")
        self.name = module.params.get("name")
        self.facts = module.params.get("facts")
        self.append = module.params.get("append")

        # cache locations used to detect changes between runs
        self.cache_directory = f"/var/cache/ansible/{self.name}"
        self.checksum_file = os.path.join(self.cache_directory, "facts.checksum")
        self.json_file = os.path.join(self.cache_directory, "facts.json")
        # the fact file itself is an executable bash script that emits JSON
        self.facts_directory = "/etc/ansible/facts.d"
        self.facts_file = os.path.join(self.facts_directory, f"{self.name}.fact")

    def run(self):
        """
        runner

        Create or remove the fact file depending on state, using checksums of
        the fact data to decide whether anything changed.
        """
        create_directory(self.cache_directory)
        create_directory(self.facts_directory, mode="0775")

        old_facts = {}

        _failed = False
        _changed = False
        _msg = "There are no changes."

        checksum = None

        if self.state == "absent":
            # remove fact file plus the cached json/checksum companions
            for f in [self.checksum_file, self.json_file, self.facts_file]:
                if os.path.exists(f):
                    remove_file(f)
                    _changed = True
                    _msg = "The facts have been successfully removed."

            return dict(changed=_changed, msg=_msg)

        checksum = Checksum(self.module)

        # if the fact file vanished, the cache is stale: drop it so the
        # checksum comparison below forces a rewrite
        if not os.path.exists(self.facts_file):
            if os.path.exists(self.checksum_file):
                os.remove(self.checksum_file)
            if os.path.exists(self.json_file):
                os.remove(self.json_file)

        if os.path.exists(self.json_file):
            with open(self.json_file) as f:
                old_facts = json.load(f)

        # self.module.log(f" old_facts : {old_facts}")

        old_checksum = checksum.checksum(old_facts)
        new_checksum = checksum.checksum(self.facts)

        changed = not (old_checksum == new_checksum)

        # self.module.log(f" changed : {changed}")
        # self.module.log(f" new_checksum : {new_checksum}")
        # self.module.log(f" old_checksum : {old_checksum}")

        if self.append and changed:
            # NOTE(review): old_facts is merged here, but the files below are
            # written from self.facts only — confirm whether the merged
            # old_facts was meant to be persisted instead.
            old_facts.update(self.facts)
            changed = True

        # self.module.log(f" facts : {self.facts}")

        if not changed:
            return dict(
                changed=False,
            )

        # Serializing json
        json_object = json.dumps(self.facts, indent=2)

        # Writing to sample.json
        with open(self.facts_file, "w") as outfile:
            outfile.write("#!/usr/bin/env bash\n# generated by ansible\ncat <<EOF\n")

        with open(self.facts_file, "a+") as outfile:
            outfile.write(json_object + "\nEOF\n")

        with open(self.json_file, "w") as outfile:
            outfile.write(json.dumps(self.facts))

        # write_template(self.facts_file, TPL_FACT, self.facts)
        chmod(self.facts_file, "0775")

        checksum.write_checksum(self.checksum_file, new_checksum)

        return dict(
            failed=_failed,
            changed=True,
            msg="The facts have been successfully written.",
        )

    def __has_changed(self, data_file, checksum_file, data):
        """
        Compare *data* against the checksum stored in *checksum_file*.

        NOTE(review): this method appears unused and broken — it references
        self.__checksum and self.force, neither of which is defined on this
        class, and _data is unbound when data is not a str. Confirm whether it
        can be removed.
        """
        old_checksum = ""

        if not os.path.exists(data_file) and os.path.exists(checksum_file):
            # data file gone: the stored checksum is stale
            os.remove(checksum_file)

        if os.path.exists(checksum_file):
            with open(checksum_file, "r") as f:
                old_checksum = f.readlines()[0]

        if isinstance(data, str):
            _data = sorted(data.split())
            _data = "\n".join(_data)

        checksum = self.__checksum(_data)
        changed = not (old_checksum == checksum)

        if self.force:
            changed = True
            old_checksum = ""

        # self.module.log(msg=f" - new checksum '{checksum}'")
        # self.module.log(msg=f" - curr checksum '{old_checksum}'")
        # self.module.log(msg=f" - changed '{changed}'")

        return changed, checksum, old_checksum
|
||||
|
||||
|
||||
def main():
    """
    Module entry point for custom fact management.
    """
    argument_spec = dict(
        state=dict(
            choices=["present", "absent"],
            default="present",
        ),
        name=dict(type="str", required=True),
        facts=dict(type="dict", required=True),
        append=dict(type="bool", required=False, default=True),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    result = AnsibleFacts(module).run()

    module.log(msg=f"= result: {result}")

    module.exit_json(**result)


# import module snippets
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,216 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
DOCUMENTATION = """
|
||||
module: journalctl
|
||||
version_added: 1.0.6
|
||||
author: "Bodo Schulz (@bodsch) <bodo@boone-schulz.de>"
|
||||
|
||||
short_description: Query the systemd journal with a very limited number of possible parameters.
|
||||
|
||||
description:
|
||||
- Query the systemd journal with a very limited number of possible parameters.
|
||||
- In certain cases there are errors that are not clearly traceable but are logged in the journal.
|
||||
- This module is intended to be a tool for error analysis.
|
||||
|
||||
options:
|
||||
identifier:
|
||||
description:
|
||||
- Show entries with the specified syslog identifier
|
||||
type: str
|
||||
required: false
|
||||
unit:
|
||||
description:
|
||||
- Show logs from the specified unit
|
||||
type: str
|
||||
required: false
|
||||
lines:
|
||||
description:
|
||||
- Number of journal entries to show
|
||||
type: int
|
||||
required: false
|
||||
reverse:
|
||||
description:
|
||||
- Show the newest entries first
|
||||
type: bool
|
||||
required: false
|
||||
arguments:
|
||||
description:
|
||||
- A list of custom attributes
|
||||
type: list
|
||||
required: false
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
- name: chrony entries from journalctl
|
||||
journalctl:
|
||||
identifier: chrony
|
||||
lines: 50
|
||||
register: journalctl
|
||||
when:
|
||||
- ansible_facts.service_mgr == 'systemd'
|
||||
|
||||
- name: journalctl entries from this module
|
||||
journalctl:
|
||||
identifier: ansible-journalctl
|
||||
lines: 250
|
||||
register: journalctl
|
||||
when:
|
||||
- ansible_facts.service_mgr == 'systemd'
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
rc:
|
||||
description:
|
||||
- Return Value
|
||||
type: int
|
||||
cmd:
|
||||
description:
|
||||
- journalctl with the called parameters
|
||||
type: string
|
||||
stdout:
|
||||
description:
|
||||
- The output as a list on stdout
|
||||
type: list
|
||||
stderr:
|
||||
description:
|
||||
- The output as a list on stderr
|
||||
type: list
|
||||
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class JournalCtl(object):
    """
    Small wrapper around the journalctl binary.

    Builds a journalctl command line from the module parameters and returns
    rc / cmd / stdout / stderr to the caller.
    """

    module = None

    def __init__(self, module):
        """
        Read parameters and resolve the journalctl binary
        (fails the module if the binary is missing).
        """
        self.module = module

        self._journalctl = module.get_bin_path("journalctl", True)

        self.unit = module.params.get("unit")
        self.identifier = module.params.get("identifier")
        self.lines = module.params.get("lines")
        self.reverse = module.params.get("reverse")
        self.arguments = module.params.get("arguments")

    def run(self):
        """
        Entry point: query the journal and return the command result.
        """
        return self.journalctl_lines()

    def journalctl_lines(self):
        """
        Assemble the journalctl command line and execute it.

        Mirrors: journalctl [OPTIONS...] [MATCHES...]
        """
        cmd = [self._journalctl]

        if self.unit:
            cmd += ["--unit", self.unit]

        if self.identifier:
            cmd += ["--identifier", self.identifier]

        if self.lines:
            cmd += ["--lines", str(self.lines)]

        if self.reverse:
            cmd.append("--reverse")

        # pass any free-form extra arguments through unchanged
        if len(self.arguments) > 0:
            cmd.extend(self.arguments)

        rc, out, err = self._exec(cmd)

        return dict(
            rc=rc,
            cmd=" ".join(cmd),
            stdout=out,
            stderr=err,
        )

    def _exec(self, args):
        """
        Run the given command; log details on a non-zero exit code.
        """
        rc, out, err = self.module.run_command(args, check_rc=False)

        if rc != 0:
            self.module.log(msg=f" rc : '{rc}'")
            self.module.log(msg=f" out: '{out}'")
            self.module.log(msg=f" err: '{err}'")

        return rc, out, err
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
|
||||
|
||||
def main():
    """
    Module entry point: build the argument spec, run the journal query and
    exit with its result.
    """
    args = dict(
        identifier=dict(required=False, type="str"),
        unit=dict(required=False, type="str"),
        lines=dict(required=False, type="int"),
        reverse=dict(required=False, default=False, type="bool"),
        # BUGFIX: 'type' must be the Ansible type name string "list", not the
        # builtin `list` callable — a callable is invoked on the raw value, so
        # a string argument would be exploded into single characters.
        arguments=dict(required=False, default=[], type="list"),
    )

    module = AnsibleModule(
        argument_spec=args,
        supports_check_mode=False,
    )

    k = JournalCtl(module)
    result = k.run()

    module.log(msg=f"= result: {result}")

    module.exit_json(**result)


# import module snippets
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,293 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2020-2023, Bodo Schulz <bodo@boone-schulz.de>
|
||||
# Apache-2.0 (see LICENSE or https://opensource.org/license/apache-2-0)
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import os
|
||||
import warnings
|
||||
|
||||
try:
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
except ImportError: # pragma: no cover
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.mysql import mysql_driver, mysql_driver_fail_msg
|
||||
from ansible.module_utils.six.moves import configparser
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
DOCUMENTATION = r"""
|
||||
module: mysql_schema
|
||||
version_added: '1.0.15'
|
||||
author: "Bodo Schulz (@bodsch) <bodo@boone-schulz.de>"
|
||||
|
||||
short_description: check the named schema exists in a mysql.
|
||||
|
||||
description:
|
||||
- check the named schema exists in a mysql (or compatible) database.
|
||||
|
||||
options:
|
||||
login_user:
|
||||
description:
|
||||
- user name to login into database.
|
||||
type: str
|
||||
required: false
|
||||
|
||||
login_password:
|
||||
description:
|
||||
- password for user name to login into database.
|
||||
type: str
|
||||
required: false
|
||||
|
||||
login_host:
|
||||
description:
|
||||
- database hostname
|
||||
type: str
|
||||
default: 127.0.0.1
|
||||
required: false
|
||||
|
||||
login_port:
|
||||
description:
|
||||
- database port
|
||||
type: int
|
||||
default: 3306
|
||||
required: false
|
||||
|
||||
login_unix_socket:
|
||||
description:
|
||||
- database socket
|
||||
type: str
|
||||
required: false
|
||||
|
||||
database_config_file:
|
||||
description:
|
||||
- optional config file with credentials
|
||||
type: str
|
||||
required: false
|
||||
|
||||
table_schema:
|
||||
description:
|
||||
- database schema to check
|
||||
type: str
|
||||
required: true
|
||||
|
||||
table_name:
|
||||
description:
|
||||
- optional table name
|
||||
type: str
|
||||
required: false
|
||||
"""
|
||||
|
||||
EXAMPLES = r"""
|
||||
- name: ensure, table_schema is present
|
||||
bodsch.core.mysql_schema:
|
||||
login_host: '::1'
|
||||
login_user: root
|
||||
login_password: password
|
||||
table_schema: icingaweb2
|
||||
|
||||
- name: ensure table_schema is created
|
||||
bodsch.core.mysql_schema:
|
||||
login_host: database
|
||||
login_user: root
|
||||
login_password: root
|
||||
table_schema: icingadb
|
||||
register: mysql_icingawebdb_schema
|
||||
"""
|
||||
|
||||
RETURN = r"""
|
||||
exists:
|
||||
description:
|
||||
- is the named schema present
|
||||
type: bool
|
||||
changed:
|
||||
description: TODO
|
||||
type: bool
|
||||
failed:
|
||||
description: TODO
|
||||
type: bool
|
||||
"""
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MysqlSchema(object):
    """
    Check whether a named schema (and optionally a table inside it)
    exists on a MySQL-compatible server.
    """

    module = None

    def __init__(self, module):
        """
        Read all connection and lookup parameters from the Ansible module.

        :param module: AnsibleModule instance providing ``params`` and logging.
        """
        self.module = module

        self.login_user = module.params.get("login_user")
        self.login_password = module.params.get("login_password")
        self.login_host = module.params.get("login_host")
        self.login_port = module.params.get("login_port")
        self.login_unix_socket = module.params.get("login_unix_socket")
        self.database_config_file = module.params.get("database_config_file")
        self.table_schema = module.params.get("table_schema")
        self.table_name = module.params.get("table_name")

        # connection timeout in seconds (currently not passed to the driver)
        self.db_connect_timeout = 30

    def run(self):
        """
        Entry point: query information_schema and build the module result.

        :return: dict with ``failed``, ``changed`` and, on success, ``exists``.
        """
        if mysql_driver is None:
            # fail_json() exits the module; no further driver checks needed
            self.module.fail_json(msg=mysql_driver_fail_msg)

        warnings.filterwarnings("error", category=mysql_driver.Warning)

        state, error, error_message = self._information_schema()

        if error:
            return dict(failed=True, changed=False, msg=error_message)

        return dict(failed=False, changed=False, exists=state)

    def _information_schema(self):
        """
        Look the schema (and optional table) up in information_schema.

        :return: tuple ``(state, error, error_message)`` where
                 ``state`` is a bool (found or not, None on connect failure),
                 ``error`` is a bool (connection or query failure) and
                 ``error_message`` is a string or None.
        """
        cursor, conn, error, message = self.__mysql_connect()

        if error:
            return None, error, message

        # parameterized query: never interpolate user input into SQL
        query = "SELECT TABLE_SCHEMA, TABLE_NAME FROM information_schema.tables where TABLE_SCHEMA = %s"

        try:
            cursor.execute(query, (self.table_schema,))
        except mysql_driver.ProgrammingError as e:
            # close the handles before bailing out (they leaked before)
            cursor.close()
            conn.close()

            message = f"Cannot execute SQL '{query}' : {to_native(e)}"
            self.module.log(msg=f"ERROR: {message}")

            return False, True, message

        records = cursor.fetchall()
        cursor.close()
        conn.close()

        if self.table_name is not None:
            table_names = [row[1] for row in records]

            if self.table_name in table_names:
                self.module.log(
                    msg=f" - table name {self.table_name} exists in table schema"
                )
                return True, False, None
        else:
            self.module.log(msg=" - table schema exists")

            # NOTE(review): the schema only counts as existing when it holds
            # at least 4 tables - presumably to detect a fully imported schema
            # rather than an empty one. TODO confirm this threshold.
            if len(records) >= 4:
                return True, False, None

        return False, False, None

    def __mysql_connect(self):
        """
        Open a connection using a config file, a unix socket or
        host/port credentials.

        :return: tuple ``(cursor, connection, error, message)``; on failure
                 cursor and connection are None and ``error`` is True.
        """
        config = {}

        config_file = self.database_config_file

        if config_file and os.path.exists(config_file):
            config["read_default_file"] = config_file

            # TODO
            # cp = self.__parse_from_mysql_config_file(config_file)

        if self.login_unix_socket:
            config["unix_socket"] = self.login_unix_socket
        else:
            config["host"] = self.login_host
            config["port"] = self.login_port

        # explicit credentials override whatever the config file provides
        if self.login_user is not None:
            config["user"] = self.login_user
        if self.login_password is not None:
            config["passwd"] = self.login_password

        if mysql_driver is None:
            self.module.fail_json(msg=mysql_driver_fail_msg)

        try:
            db_connection = mysql_driver.connect(**config)
        except Exception as e:
            message = "unable to connect to database. "
            message += "check login_host, login_user and login_password are correct "
            message += f"or {config_file} has the credentials. "
            message += f"Exception message: {to_native(e)}"

            self.module.log(msg=message)

            return (None, None, True, message)

        return db_connection.cursor(), db_connection, False, "successful connected"

    def __parse_from_mysql_config_file(self, cnf):
        # read an ini-style my.cnf file; currently unused (see TODO above)
        cp = configparser.ConfigParser()
        cp.read(cnf)
        return cp
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------
|
||||
# Module execution.
|
||||
#
|
||||
|
||||
|
||||
def main():
    """
    Ansible entry point: build the argument spec, run the schema check
    and exit with its result.
    """
    argument_spec = dict(
        login_user=dict(type="str"),
        login_password=dict(type="str", no_log=True),
        login_host=dict(type="str", default="127.0.0.1"),
        login_port=dict(type="int", default=3306),
        login_unix_socket=dict(type="str"),
        database_config_file=dict(required=False, type="path"),
        table_schema=dict(required=True, type="str"),
        table_name=dict(required=False, type="str"),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=False,
    )

    checker = MysqlSchema(module)
    result = checker.run()

    module.log(msg=f"= result : '{result}'")

    module.exit_json(**result)
||||
|
||||
# run the module only when executed directly (not on import)
if __name__ == "__main__":
    main()
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue