Adding Netbox
@@ -0,0 +1,137 @@
# (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'network'}

DOCUMENTATION = """
---
module: extract_banners
author: Ansible Network Team
short_description: remove banners from config text
description:
  - Extracts banner sections from the config text specified in C(config).
    Banners have to be pushed to the device separately from the rest of the
    configuration, so the module returns the config with the banner sections
    masked out together with the extracted banner lines.
version_added: "2.7"
options:
  config:
    description:
      - Config text from which banners need to be extracted.
    required: yes
"""

EXAMPLES = """
- name: extract multiline banners
  extract_banners:
    config: "{{ ios_config_text }}"

"""

RETURN = """
config:
  description: the config text with the banner sections masked out
  returned: always
  type: str
banners:
  description: the extracted banner lines
  returned: always
  type: list
"""
import re

from ansible.plugins.action import ActionBase
from ansible.module_utils._text import to_text
from ansible.errors import AnsibleError

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class ActionModule(ActionBase):

    def run(self, tmp=None, task_vars=None):
        ''' handler for extract_banners '''

        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        try:
            config = self._task.args['config']
        except KeyError as exc:
            raise AnsibleError(to_text(exc))

        # make config required argument
        if not config:
            raise AnsibleError('missing required argument `config`')

        banners, masked_config = self._extract_banners(config)
        result['config'] = masked_config
        result['banners'] = banners
        return result

    def _extract_banners(self, config):
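        # Illustrative example: given the lines
        #     banner motd ^C
        #     Authorized access only
        #     ^C
        # the three lines are returned as banner lines and each is replaced in
        # the config with a '! banner removed' placeholder.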
        config_lines = config.split('\n')
        found_banner_start = 0
        banner_meta = []
        for linenum, line in enumerate(config_lines):
            if not found_banner_start:
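                # A banner section opens with e.g. "banner motd ^C"; group(1)
                # is the banner type and group(2) the delimiter that also
                # closes the section.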
                banner_start = re.search(r'^banner\s+(\w+)\s+(.*)', line)
                if banner_start:
                    banner_cmd = banner_start.group(1)
                    try:
                        banner_delimiter = banner_start.group(2)
                        banner_delimiter = banner_delimiter.strip()
                        banner_delimiter_esc = re.escape(banner_delimiter)
                    except Exception:
                        continue
                    banner_start_index = linenum
                    found_banner_start = 1
                    continue

            if found_banner_start:
                # Search for delimiter found in current banner start
                regex = r'%s' % banner_delimiter_esc
                banner_end = re.search(regex, line)
                if banner_end:
                    found_banner_start = 0
                    kwargs = {
                        'banner_cmd': banner_cmd,
                        'banner_delimiter': banner_delimiter,
                        'banner_start_index': banner_start_index,
                        'banner_end_index': linenum,
                    }
                    banner_meta.append(kwargs)

        # Build banners from extracted data
        banner_lines = []
        for banner in banner_meta:
            banner_lines.append('banner %s %s' % (banner['banner_cmd'],
                                                  banner['banner_delimiter']))
            banner_conf_lines = config_lines[banner['banner_start_index'] + 1: banner['banner_end_index']]
            for index, conf_line in enumerate(banner_conf_lines):
                banner_lines.append(conf_line)
            banner_lines.append('%s' % banner['banner_delimiter'])

        # Delete banner lines from config
        for banner in banner_meta:
            banner_lines_range = range(banner['banner_start_index'],
                                       banner['banner_end_index'] + 1)
            for index in banner_lines_range:
                config_lines[index] = '! banner removed'

        configs = '\n'.join(config_lines)
        return (banner_lines, configs)

@@ -0,0 +1,111 @@
# (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'network'}

import re
import base64
import hashlib

from ansible.plugins.action import ActionBase


class UserManager:

    def __init__(self, new_users, user_config_data):
        self.__new_users = new_users
        self.__user_config_data = user_config_data

    @staticmethod
    def calculate_fingerprint(sshkey):
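        # If the key carries a type prefix (e.g. "ssh-rsa AAAA..."), only the
        # base64 blob is replaced by its uppercase MD5 hex digest; a bare blob
        # is hashed the same way and prefixed with "ssh-rsa", so it can be
        # compared against the key-hash values parsed from the device config.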
        if ' ' in sshkey:
            keyparts = sshkey.split(' ')
            keyparts[1] = hashlib.md5(base64.b64decode(keyparts[1])).hexdigest().upper()
            return ' '.join(keyparts)
        else:
            return 'ssh-rsa %s' % hashlib.md5(base64.b64decode(sshkey)).hexdigest().upper()

    def _parse_view(self, data):
        match = re.search(r'view (\S+)', data, re.M)
        if match:
            return match.group(1)

    def _parse_sshkey(self, data):
        match = re.search(r'key-hash (\S+ \S+(?: .+)?)$', data, re.M)
        if match:
            return match.group(1)

    def _parse_privilege(self, data):
        match = re.search(r'privilege (\S+)', data, re.M)
        if match:
            return int(match.group(1))

    def generate_existing_users(self):
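        # Collect user names from lines such as "username admin privilege 15".
        # The leading (?:^(?:u|\s{2}u)) also matches two-space indented
        # "  username ..." lines nested under another configuration block.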
        match = re.findall(r'(?:^(?:u|\s{2}u))sername (\S+)', self.__user_config_data, re.M)
        if not match:
            return []

        existing_users = []

        for user in set(match):
            regex = r'username %s .+$' % user
            cfg = re.findall(regex, self.__user_config_data, re.M)
            cfg = '\n'.join(cfg)
            sshregex = r'username %s\n\s+key-hash .+$' % user
            sshcfg = re.findall(sshregex, self.__user_config_data, re.M)
            sshcfg = '\n'.join(sshcfg)

            obj = {
                'name': user,
                'sshkey': self._parse_sshkey(sshcfg),
                'privilege': self._parse_privilege(cfg),
                'view': self._parse_view(cfg)
            }

            filtered = {k: v for k, v in obj.items() if v is not None}
            obj.clear()
            obj.update(filtered)

            existing_users.append(obj)

        return existing_users

    def filter_users(self):
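        # Normalise the desired ssh keys to the same fingerprint form as the
        # parsed config, then keep only the users that differ from what the
        # device already has; 'changed' reflects whether anything is left.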
        want = self.__new_users
        for user in want:
            if 'sshkey' in user:
                user['sshkey'] = self.calculate_fingerprint(user['sshkey'])

        have = self.generate_existing_users()
        filtered_users = [x for x in want if x not in have]

        changed = len(filtered_users) > 0

        return changed, filtered_users


class ActionModule(ActionBase):

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        try:
            new_users = self._task.args['new_users']
            user_config_data = self._task.args['user_config']
        except KeyError as exc:
            return {'failed': True, 'msg': 'missing required argument: %s' % exc}

        result['changed'], result['stdout'] = UserManager(new_users, user_config_data).filter_users()

        return result

@@ -0,0 +1,262 @@
# (c) 2018, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import copy
import os
import time
import re
import hashlib
import netaddr
import json
import socket

from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.connection import Connection
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.path import unfrackpath

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class ActionModule(ActionBase):

    def run(self, tmp=None, task_vars=None):
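        # Parse the captured ACL show output with the supplied TextFSM
        # template and write the resulting flow entries to a file, skipping
        # the write when the generated content is unchanged.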
        result = super(ActionModule, self).run(task_vars=task_vars)

        try:
            show_acl_output_buffer = self._task.args['show_acl_output_buffer']
            parser = self._task.args['parser']
            generated_flow_file = self._task.args['generated_flow_file']
        except KeyError as exc:
            return {'failed': True, 'msg': 'missing required argument: %s' % exc}

        generated_flow_file = unfrackpath(generated_flow_file)
        dest = generated_flow_file

        parser = unfrackpath(parser)
        if not os.path.exists(parser):
            return {'failed': True, 'msg': 'path: %s does not exist.' % parser}
        parser_file = parser

        pd_json = self._parse_acl_with_textfsm(
            parser_file, show_acl_output_buffer)
        try:
            changed = self._write_packet_dict(dest, pd_json)
        except IOError as exc:
            result['failed'] = True
            result['msg'] = ('Exception received : %s' % exc)
            return result

        result['changed'] = changed
        if changed:
            result['destination'] = dest
        else:
            result['dest_unchanged'] = dest

        return result

    def _create_packet_dict(self, cmd_out):
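        # Build packet dicts from ACL text using trigger.acl.parse(): each
        # line becomes a dict of src/dst/proto/port/action values, and for
        # network matches a single representative host from the middle of the
        # subnet is used as the src/dst value.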
        import warnings
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            from trigger.acl import parse
            import netaddr
            import json
            import uuid

        # pd is a list of packet dictionaries
        pd = []
        lines = cmd_out.split('\n')
        for index, line in enumerate(lines):
            line = to_bytes(line, errors='surrogate_or_strict')
            pd_it = {}
            try:
                p = parse(line)
            except Exception:
                continue

            if p.terms:
                match = p.terms[0].match
                for key in match:
                    if key == 'source-address':
                        for m in match["source-address"]:
                            v = netaddr.IPNetwork(str(m))
                            # Return the host in middle of subnet
                            size_subnet = v.size
                            host_index = int(size_subnet / 2)
                            pd_it["src"] = str(v[host_index])
                    if key == 'destination-address':
                        for m in match["destination-address"]:
                            v = netaddr.IPNetwork(str(m))
                            # Return the host in middle of subnet
                            size_subnet = v.size
                            host_index = int(size_subnet / 2)
                            pd_it["dst"] = str(v[host_index])
                    if key == 'protocol':
                        for m in match['protocol']:
                            pd_it["proto"] = str(m)
                    if key == 'destination-port':
                        for m in match["destination-port"]:
                            pd_it['dst_port'] = str(m)
                    if key == 'source-port':
                        for m in match["source-port"]:
                            pd_it['src_port'] = str(m)

                action = p.terms[0].action
                for act in action:
                    pd_it["action"] = act

            if pd_it is not None:
                if "dst" not in pd_it:
                    pd_it["dst"] = "any"
                if "src" not in pd_it:
                    pd_it["src"] = "any"
                pd_it["service_line_index"] = str(index)
                pd.append(pd_it)

        return json.dumps(pd, indent=4)

    def _write_packet_dict(self, dest, contents):
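        # Compare SHA-1 checksums of the existing file and the new contents so
        # the file is only rewritten (and a change reported) when it differs.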
        # Check for Idempotency
        if os.path.exists(dest):
            try:
                with open(dest, 'r') as f:
                    old_content = f.read()
            except IOError as ioexc:
                raise IOError(ioexc)
            sha1 = hashlib.sha1()
            old_content_b = to_bytes(old_content, errors='surrogate_or_strict')
            sha1.update(old_content_b)
            checksum_old = sha1.digest()

            sha1 = hashlib.sha1()
            new_content_b = to_bytes(contents, errors='surrogate_or_strict')
            sha1.update(new_content_b)
            checksum_new = sha1.digest()
            if checksum_old == checksum_new:
                return False

        try:
            with open(dest, 'w') as f:
                f.write(contents)
        except IOError as ioexc:
            raise IOError(ioexc)

        return True

    def _parse_acl_with_textfsm(self, parser_file, output):
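        # Run the show output through the TextFSM template to get one record
        # per ACL entry, then convert each record into a flow dict (pd) and a
        # parallel dict of the original terms (parsed_acl).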
        import textfsm
        with open(parser_file) as template:
            re_table = textfsm.TextFSM(template)
            results = re_table.ParseText(output)
        fsm_results = []
        for item in results:
            facts = {}
            facts.update(dict(zip(re_table.header, item)))
            fsm_results.append(facts)

        pd = []
        parsed_acl = []
        # Convert dictionary of terms into flows dictionary
        for term in fsm_results:
            pd_it = {}
            original_terms = {}
            for k, v in term.items():
                if k == 'LINE_NUM' and v == '':
                    # Empty line with just name
                    continue
                elif k == 'LINE_NUM' and v != '':
                    pd_it["service_line_index"] = v
                    original_terms["service_line_index"] = v
                if k == 'PROTOCOL' and v != '':
                    pd_it["proto"] = v
                    original_terms['proto'] = v
                if k == 'ACTION' and v != '':
                    pd_it["action"] = v
                    original_terms['action'] = v
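                # A wildcard mask is turned into a prefix length by counting
                # the set bits of its inverse, e.g. 0.0.0.255 -> /24, and a
                # representative host is picked from the middle of that subnet.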
                if k == 'SRC_NETWORK' and v != '':
                    if 'SRC_WILDCARD' in term:
                        src_mask = term['SRC_WILDCARD']
                        src_invert_mask = sum([bin(255 - int(x)).count("1") for x in
                                               src_mask.split(".")])
                    else:
                        src_invert_mask = '32'
                    cidr = "%s/%s" % (v, src_invert_mask)
                    src_ip = netaddr.IPNetwork(cidr)
                    size_subnet = src_ip.size
                    host_index = int(size_subnet / 2)
                    pd_it['src'] = str(src_ip[host_index])
                    original_terms['src'] = src_ip
                if k == 'SRC_ANY' and v != '':
                    pd_it['src'] = "any"
                    original_terms['src'] = netaddr.IPNetwork('0.0.0.0/0')
                if k == 'SRC_HOST' and v != '':
                    pd_it['src'] = v
                    original_terms['src'] = v
                if k == 'SRC_PORT' and v != '':
                    if not v[0].isdigit():
                        v = str(socket.getservbyname(v))
                    pd_it['src_port'] = v
                    original_terms['src_port'] = v
                if k == 'DST_NETWORK' and v != '':
                    if 'DST_WILDCARD' in term:
                        dst_mask = term['DST_WILDCARD']
                        dst_invert_mask = sum([bin(255 - int(x)).count("1") for x in
                                               dst_mask.split(".")])
                    else:
                        dst_invert_mask = '32'
                    d_cidr = "%s/%s" % (v, dst_invert_mask)
                    dst_ip = netaddr.IPNetwork(d_cidr)
                    d_size_subnet = dst_ip.size
                    d_host_index = int(d_size_subnet / 2)
                    pd_it['dst'] = str(dst_ip[d_host_index])
                    original_terms['dst'] = dst_ip
                if k == 'DST_ANY' and v != '':
                    pd_it['dst'] = "any"
                    original_terms['dst'] = netaddr.IPNetwork('0.0.0.0/0')
                if k == 'DST_HOST' and v != '':
                    pd_it['dst'] = v
                    original_terms['dst'] = v
                if k == 'DST_PORT' and v != '':
                    if not v[0].isdigit():
                        v = str(socket.getservbyname(v))
                    pd_it['dst_port'] = v
                    original_terms['dst_port'] = v

            if pd_it:
                pd.append(pd_it)
            if original_terms:
                parsed_acl.append(original_terms)

        # Store parsed acl on this object for later processing
        self._parsed_acl = parsed_acl
        return json.dumps(pd, indent=4)