trafficcontrol-commits mailing list archives

From jhg...@apache.org
Subject [trafficcontrol] 04/04: Refactor the external python association script into a custom filter plugin
Date Wed, 18 Sep 2019 17:26:12 GMT
This is an automated email from the ASF dual-hosted git repository.

jhg03a pushed a commit to branch ansible.refactor
in repository https://gitbox.apache.org/repos/asf/trafficcontrol.git

commit 3730297a80d45fb6ea4d0abd6db7441ee6ee48e5
Author: Jonathan Gray <Jonathan_Gray@comcast.com>
AuthorDate: Wed Sep 18 11:25:58 2019 -0600

    Refactor the external python association script into a custom filter plugin
    
    This significantly helps with code cleanup/clarity.
---
 .../roles/dataset_loader/files/selection.set.py    |  49 ------
 .../filter_plugins/dsloader.filter.util.py         |  36 +++++
 .../roles/dataset_loader/tasks/dataset_loader.yml  | 164 +++------------------
 .../dataset_loader/tasks/update_mso_servers.yml    |   9 +-
 4 files changed, 61 insertions(+), 197 deletions(-)

diff --git a/infrastructure/ansible/roles/dataset_loader/files/selection.set.py b/infrastructure/ansible/roles/dataset_loader/files/selection.set.py
deleted file mode 100755
index 05ee37c..0000000
--- a/infrastructure/ansible/roles/dataset_loader/files/selection.set.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/python
-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This script is used to provide a round-robin merging of two lists
-
-import sys
-import json
-
-if len(sys.argv) < 3 or len(sys.argv) > 4:
-    print "{}"
-    sys.exit(0)
-
-cdn_csv_list = sys.argv[1].split(',')
-fqdn_csv_list = sys.argv[2].split(',')
-option = ''
-if len(sys.argv) == 4:
-    option = sys.argv[3]
-cdn_csv_list.sort()
-fqdn_csv_list.sort()
-
-step_size = len(cdn_csv_list)
-out_list_normal = {}
-for i, val in enumerate(cdn_csv_list):
-    sublist = fqdn_csv_list[i:]
-    out_list_normal[val] = ','.join(sublist[::step_size])
-
-out_list_denormal = {}
-for val, csvlist in out_list_normal.items():
-    for i in csvlist.split(','):
-        if i != "":
-            out_list_denormal[i] = val
-
-if option == 'denormalize':
-    print json.dumps(out_list_denormal)
-else:
-    print json.dumps(out_list_normal)
diff --git a/infrastructure/ansible/roles/dataset_loader/filter_plugins/dsloader.filter.util.py b/infrastructure/ansible/roles/dataset_loader/filter_plugins/dsloader.filter.util.py
new file mode 100644
index 0000000..eb8a9b1
--- /dev/null
+++ b/infrastructure/ansible/roles/dataset_loader/filter_plugins/dsloader.filter.util.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class FilterModule(object):
+    def filters(self):
+        return {
+            'associate_round_robin': self.associate_round_robin,
+            'denormalize_association': self.denormalize_association
+        }
+    def associate_round_robin(self, set1, set2):
+        step_size = len(set1)
+        out_list_normal = {}
+        for i, val in enumerate(set1):
+            # Offset the beginning of the second set by the index of where we are processing the first set
+            sublist = set2[i:]
+            # Snag every Nth element from the offset sublist and associate that with set1
+            out_list_normal[val] = sublist[::step_size]
+        return out_list_normal
+
+    def denormalize_association(self, association):
+        out = {}
+        for key, vals in association.items():
+            for val in vals:
+                out[val] = key
+        return out
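
As a minimal, self-contained sketch of what the two filters above compute (the cachegroup and host names here are invented for illustration; the playbooks pass real cachegroup names and inventory groups):

    # Helper functions repeating the plugin's logic so the example runs standalone.
    def associate_round_robin(set1, set2):
        # Every len(set1)-th element of set2, starting at each key's index.
        step_size = len(set1)
        return {val: set2[i:][::step_size] for i, val in enumerate(set1)}

    def denormalize_association(association):
        # Invert the map so each member points back at its key.
        return {v: k for k, vals in association.items() for v in vals}

    cachegroups = ["cg-east", "cg-west"]
    hosts = ["edge-1", "edge-2", "edge-3", "edge-4", "edge-5"]

    normal = associate_round_robin(cachegroups, hosts)
    # {'cg-east': ['edge-1', 'edge-3', 'edge-5'], 'cg-west': ['edge-2', 'edge-4']}
    denormal = denormalize_association(normal)
    # {'edge-1': 'cg-east', 'edge-3': 'cg-east', 'edge-5': 'cg-east',
    #  'edge-2': 'cg-west', 'edge-4': 'cg-west'}
    print(normal, denormal)
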
diff --git a/infrastructure/ansible/roles/dataset_loader/tasks/dataset_loader.yml b/infrastructure/ansible/roles/dataset_loader/tasks/dataset_loader.yml
index b61054f..b6cee3e 100644
--- a/infrastructure/ansible/roles/dataset_loader/tasks/dataset_loader.yml
+++ b/infrastructure/ansible/roles/dataset_loader/tasks/dataset_loader.yml
@@ -236,116 +236,34 @@
     method: GET
   register: get_all_cachegroups
 
-- name: Associate cachegroups to phys_locations
-  command: "{{ role_path }}/files/selection.set.py {{ item | quote }} {{ dl_ds_merged_divisions
| json_query(physlocation_query) | join(',') | quote }}"
-  register: cachegroup_physlocations_out
-  changed_when: false
-  with_items: "{{ dl_ds_merged_divisions | json_query(cachegroup_query) | unique }}"
-  vars:
-    cachegroup_query: "[*].regions[*].phys_locations[*].cachegroups[][][]"
-    physlocation_query: "[*].regions[*].phys_locations[?contains(cachegroups,'{{ item }}')].name[][]"
-
-- name: Massage cachegroup to phys_location mappings
-  set_fact:
-    physlocation_tmp_str: "{% for i in cachegroup_physlocations_out.results %}{{ i.stdout | from_json | to_json }}{% if not loop.last %}|{% endif %}{%endfor%}"
-
-- name: Build temporary association of cachegroup to region
-  set_fact:
-    phys_location_tmp_dict: "{{ (phys_location_tmp_dict| default({})) | combine(item | from_json) }}"
-  with_items: "{{ physlocation_tmp_str.split('|') }}"
-
-- name: Associate origins to cachegroups
-  command: "{{ role_path }}/files/selection.set.py {{ dl_ds_merged_cachegroups | selectattr('type',
'equalto', 'ORG_LOC') | map(attribute='name') | join(',') | quote }} {{ groups['fakeorigin']
| union(groups['mso_parent_alias']) | join(',') | quote }}"
-  register: cachegroup_origin_out
-  changed_when: false
-
-- name: Associate edge caches to cachegroups
-  command: "{{ role_path }}/files/selection.set.py {{ dl_ds_merged_cachegroups | selectattr('type',
'equalto', 'EDGE_LOC') | map(attribute='name') | join(',') | quote }} {{ groups['atsec'] |
union(groups['grove']) | join(',') | quote }}"
-  register: cachegroup_edge_out
-  changed_when: false
-
-- name: Associate mid caches to cachegroups
-  command: "{{ role_path }}/files/selection.set.py {{ dl_ds_merged_cachegroups | selectattr('type',
'equalto', 'MID_LOC') | map(attribute='name') | join(',') | quote }} {{ groups['atsmid'] |
join(',') | quote }}"
-  register: cachegroup_mid_out
-  changed_when: false
-
-- name: Associate traffic routers to cachegroups
-  command: "{{ role_path }}/files/selection.set.py {{ dl_ds_merged_cachegroups | selectattr('type',
'equalto', 'TR_LOC') | map(attribute='name') | join(',') | quote }} {{ groups['traffic_router']
| join(',') | quote }}"
-  register: cachegroup_tr_out
-  changed_when: false
-
-# List of Map of Cachegroup to Host List
-- name: Combine cachegroup associations
-  set_fact:
-    cachegroups_merged_tmp: "{{ cachegroups_merged_tmp | default([]) + [ { 'cachegroup': item.key, 'hosts': item.value.split(',') } ] }}"
-  with_dict: "{{ (cachegroup_edge_out.stdout | from_json) | combine(cachegroup_mid_out.stdout | from_json) | combine(cachegroup_tr_out.stdout | from_json) | combine(cachegroup_origin_out.stdout | from_json) }}"
-
 # Map of hosts to cachegroups
-- name: Convert cachegroup associations to a more useful format
-  set_fact:
-    cachegroups_assignments: "{{ cachegroups_assignments | default({}) | combine({ item.1: item.0.cachegroup }) }}"
-  with_subelements:
-    - "{{ cachegroups_merged_tmp }}"
-    - hosts
-
-- name: Hold onto a copy of infrastructure hosts
-  set_fact:
-    infra_hosts: "{{ groups['all'] | difference(cachegroups_assignments.keys()) }}"
-
-- name: Add remaining hosts to the infrastructure cachegroup
-  set_fact:
-    cachegroups_assignments: "{{ cachegroups_assignments | default({}) | combine({ item: 'infrastructure' }) }}"
-  with_items: "{{ infra_hosts }}"
-
-- name: Add remaining hosts to the infrastructure physical locations
-  set_fact:
-    cachegroups_merged_tmp: "{{ cachegroups_merged_tmp | default([]) + [{ 'cachegroup': 'infrastructure', 'hosts': infra_hosts }] }}"
-
-- name: Massage cachegroup associations to a forward lookup dictionary
-  set_fact:
-    cachegroup_lookup: "{{ ( cachegroup_lookup | default({}) ) | combine({ item['cachegroup']: (item['hosts'] | join(',')) }) }}"
-  with_items: "{{ cachegroups_merged_tmp }}"
-
-- name: Associate Physical Locations to Servers
-  command: "{{ role_path }}/files/selection.set.py {{ phys_location_tmp_dict[item.key] |
quote }} {{ item.value | quote }}"
-  register: physlocation_caches_out
-  changed_when: false
-  with_dict: "{{ cachegroup_lookup }}"
-
-- name: Massage phys_location to cache mappings
-  set_fact:
-    physlocation_cache_tmp_str: "{% for i in physlocation_caches_out.results %}{{ i.stdout | from_json | to_json }}{% if not loop.last %}|{% endif %}{%endfor%}"
-
-- name: Process location map (phase 1)
-  set_fact:
-    physlocation_cache_tmp_list: "{{ physlocation_cache_tmp_list | default([]) | union( [item  | from_json] ) }}"
-  with_items: "{{ physlocation_cache_tmp_str.split('|') }}"
-
-- name: Process location map (phase 2)
+- name: Associate origins, caches, and routers to cachegroups
   set_fact:
-    used_locations: "{{ physlocation_cache_tmp_list | json_query(location_query) | unique }}"
+    cachegroups_assignments: "{{ {} |combine(origins) | combine(edges) | combine(mids) | combine(trs) | combine(infra) }}"
   vars:
-    location_query: "[].keys(@)[]"
+    origins: "{{ dl_ds_merged_cachegroups | selectattr('type', 'equalto', 'ORG_LOC') | map(attribute='name') | list | associate_round_robin(groups['fakeorigin'] | union(groups['mso_parent_alias'])) | denormalize_association }}"
+    edges: "{{ dl_ds_merged_cachegroups | selectattr('type', 'equalto', 'EDGE_LOC') | map(attribute='name') | list | associate_round_robin(groups['atsec'] | union(groups['grove'])) | denormalize_association }}"
+    mids: "{{ dl_ds_merged_cachegroups | selectattr('type', 'equalto', 'MID_LOC') | map(attribute='name') | list | associate_round_robin(groups['atsmid']) | denormalize_association }}"
+    trs: "{{ dl_ds_merged_cachegroups | selectattr('type', 'equalto', 'TR_LOC') | map(attribute='name') | list | associate_round_robin(groups['traffic_router']) | denormalize_association }}"
+    infra: "{{ ['infrastructure'] | associate_round_robin(groups['all'] | difference(groups['fakeorigin'] + groups['mso_parent_alias'] + groups['atsec'] + groups['grove'] + groups['atsmid'] + groups['traffic_router']) ) | denormalize_association }}"
 
-- name: Process location map (phase 3)
+- name: Build reverse lookup for cachegroups to eligible phys_locations
   set_fact:
-    tmp_location_map: "{{ (tmp_location_map | default({})) | combine({ item: (physlocation_cache_tmp_list | json_query(location_query) | join(','))}) }}"
-  with_items: "{{ used_locations }}"
+    cachegroup_lookup: "{{ cachegroup_lookup | default({}) | combine(vals) }}"
+  with_items: "{{ dl_ds_merged_divisions | json_query(cachegroup_query) | unique }}"
   vars:
-    location_query: "[].\"{{ item }}\""
+    cachegroup_query: "[*].regions[*].phys_locations[*].cachegroups[][][]"
+    physlocation_query: "[*].regions[*].phys_locations[?contains(cachegroups,'{{ item }}')].name[][]"
+    vals: "{{ [item] | associate_round_robin(dl_ds_merged_divisions | json_query(physlocation_query)) }}"
 
-- name: Assemble assignments of cache to location
+- name: Associate servers to physLocation
   set_fact:
-    physlocation_assignments: "{
-                              {%- for location, host_csv in tmp_location_map.items() -%}
-                              {%- set outer_loop = loop -%}
-                              {%- for i in host_csv.split(',') -%}
-                              '{{ i }}': '{{ location }}'
-                              {%- if not loop.last %},{% endif -%}
-                              {%- endfor %}
-                              {%- if not outer_loop.last %},{% endif -%}
-                              {%- endfor %}
-                              }"
+    physlocation_assignments: "{{ physlocation_assignments | default({}) | combine( eligible_physlocations | associate_round_robin(servers_in_cachegroup) | denormalize_association ) }}"
+  with_items: "{{ cachegroups_assignments.values() | list | unique }}"
+  vars:
+    eligible_physlocations: "{{ cachegroup_lookup[item] }}"
+    servers_in_cachegroup: "{{ cachegroups_assignments | dict2items | json_query(fqdn_query) }}"
+    fqdn_query: "[?value==`{{ item }}`].key"
 
 - name: Create Division
   uri:
@@ -593,46 +511,10 @@
     hosts: "{{ host_keys_tmp | to_json | from_json | json_query(host_query) }}"
     profile_keys: "{{ ccp_list | to_json | from_json | json_query(key_query) }}"
 
-- name: Associate profiles to hosts
-  command: "{{ role_path }}/files/selection.set.py {{ paired_elegible_keys_hosts[item].profiles
| join(',') | quote }} {{ paired_elegible_keys_hosts[item].hosts | join(',') | quote }}"
-  register: profiles_hosts_out
-  changed_when: false
-  with_items: "{{ paired_elegible_keys_hosts.keys() | list}}"
-
-- name: Massage profiles to hosts
-  set_fact:
-    profile_host_tmp_str: "{% for i in profiles_hosts_out.results %}{{ i.stdout | from_json | to_json }}{% if not loop.last %}|{% endif %}{%endfor%}"
-
-- name: Process profile map (phase 1)
-  set_fact:
-    profile_host_tmp_list: "{{ profile_host_tmp_list | default([]) | union( [item  | from_json] ) }}"
-  with_items: "{{ profile_host_tmp_str.split('|') }}"
-
-- name: Process profile map (phase 2)
-  set_fact:
-    used_profiles: "{{ profile_host_tmp_list | json_query(profile_query) | unique }}"
-  vars:
-    profile_query: "[].keys(@)[]"
-
-- name: Process profile map (phase 3)
-  set_fact:
-    tmp_profile_map: "{{ (tmp_profile_map | default({})) | combine({ item: (profile_host_tmp_list | json_query(profile_query) | join(','))}) }}"
-  with_items: "{{ used_profiles }}"
-  vars:
-    profile_query: "[].\"{{ item }}\""
-
-- name: Assemble assignments of profiles to hosts
+- name: Associate eligible profiles to hosts
   set_fact:
-    profile_assignments: "{
-                          {%- for profile, host_csv in tmp_profile_map.items() -%}
-                          {%- set outer_loop = loop -%}
-                          {%- for i in host_csv.split(',') -%}
-                          '{{ i }}': '{{ profile }}'
-                          {%- if not loop.last %},{% endif -%}
-                          {%- endfor %}
-                          {%- if not outer_loop.last %},{% endif -%}
-                          {%- endfor %}
-                          }"
+    profile_assignments: "{{ profile_assignments | default({}) | combine(paired_elegible_keys_hosts[item].profiles | associate_round_robin(paired_elegible_keys_hosts[item].hosts) | denormalize_association) }}"
+  with_items: "{{ paired_elegible_keys_hosts.keys() | list }}"
 
 - name: Get All Server Statuses
   uri:
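
The dataset_loader.yml changes above apply those filters at two levels: hosts are first spread round-robin across cachegroups, then each cachegroup's servers are spread across its eligible phys_locations. A rough standalone sketch of that two-level assignment, with made-up cachegroup, host, and location names (the playbook drives the same logic through Jinja2 expressions and json_query lookups):

    # Same helper logic as the filter plugin, repeated so this runs standalone.
    def associate_round_robin(set1, set2):
        step_size = len(set1)
        return {val: set2[i:][::step_size] for i, val in enumerate(set1)}

    def denormalize_association(association):
        return {v: k for k, vals in association.items() for v in vals}

    edge_cachegroups = ["cg-east", "cg-west"]
    edge_hosts = ["edge-1", "edge-2", "edge-3"]
    # Which phys_locations each cachegroup may use (cachegroup_lookup above).
    cachegroup_lookup = {"cg-east": ["phys-ny", "phys-dc"], "cg-west": ["phys-den"]}

    # Level 1: host -> cachegroup (cachegroups_assignments above).
    cachegroups_assignments = denormalize_association(
        associate_round_robin(edge_cachegroups, edge_hosts))
    # {'edge-1': 'cg-east', 'edge-3': 'cg-east', 'edge-2': 'cg-west'}

    # Level 2: per cachegroup, spread its servers over the eligible locations
    # (physlocation_assignments above).
    physlocation_assignments = {}
    for cg in set(cachegroups_assignments.values()):
        servers = [h for h, g in cachegroups_assignments.items() if g == cg]
        physlocation_assignments.update(denormalize_association(
            associate_round_robin(cachegroup_lookup[cg], servers)))
    # edge-1 -> phys-ny, edge-3 -> phys-dc, edge-2 -> phys-den (key order may vary)
    print(physlocation_assignments)
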
diff --git a/infrastructure/ansible/roles/dataset_loader/tasks/update_mso_servers.yml b/infrastructure/ansible/roles/dataset_loader/tasks/update_mso_servers.yml
index 67aad06..7b9c155 100644
--- a/infrastructure/ansible/roles/dataset_loader/tasks/update_mso_servers.yml
+++ b/infrastructure/ansible/roles/dataset_loader/tasks/update_mso_servers.yml
@@ -16,14 +16,9 @@
     mso_server_ip: "{{ (mso_server_ip | default([]) ) + [ '127.'+(cdnDelegationIndex[Target_cdn_delegation]|string)+'.'+(item.value|string)+'.'+((Target_mso_ds.0 + 1)|string) ] }}"
   with_dict: "{{ foIndex }}"
 
-- name: Distribute Eligible MSO Profiles Among Servers
-  command: "{{ role_path }}/files/selection.set.py {{ Target_mso_ds.1.eligible_mso_server_profile_names
| unique | sort | join(',') | quote }} {{ mso_server_ip | unique | sort | join(',') | quote
}} denormalize"
-  register: mso_server_profiles_out
-  changed_when: false
-
-- name: Convert MSO Profile distribution to something more usable
+- name: Associate eligible MSO profiles to hosts
   set_fact:
-    mso_server_profiles: "{{ mso_server_profiles_out.stdout | from_json }}"
+    mso_server_profiles: "{{ Target_mso_ds.1.eligible_mso_server_profile_names | unique | sort | associate_round_robin(mso_server_ip | unique | sort) | denormalize_association }}"
 
 # NOTE: THIS BREAKS IDEMPOTENCY
 - name: Update Applicable MSO Parent Servers

