summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/feature_branch_deletion.yml24
-rw-r--r--.github/workflows/upload_schema.yml1
-rw-r--r--Makefile3
-rw-r--r--awx/api/serializers.py24
-rw-r--r--awx/api/templates/api/job_template_launch.md7
-rw-r--r--awx/api/views/__init__.py2
-rw-r--r--awx/main/models/inventory.py13
-rw-r--r--awx/main/models/jobs.py17
-rw-r--r--awx/main/models/projects.py23
-rw-r--r--awx/main/notifications/webhook_backend.py74
-rw-r--r--awx/main/registrar.py4
-rw-r--r--awx/main/tasks/callback.py17
-rw-r--r--awx/main/tasks/jobs.py8
-rw-r--r--awx/main/tests/functional/test_notifications.py44
-rw-r--r--awx/main/tests/unit/notifications/test_webhook.py21
-rw-r--r--awx/settings/defaults.py19
-rw-r--r--awx/ui/package-lock.json86
-rw-r--r--awx/ui/package.json6
-rw-r--r--awx/ui/src/screens/Job/JobOutput/HostEventModal.js14
-rw-r--r--awx/ui/src/screens/Job/JobOutput/HostEventModal.test.js48
-rw-r--r--awxkit/awxkit/api/pages/api.py8
-rw-r--r--awxkit/awxkit/api/pages/schedules.py5
-rw-r--r--tools/docker-compose-minikube/minikube/defaults/main.yml4
-rw-r--r--tools/docker-compose-minikube/minikube/tasks/main.yml39
-rw-r--r--tools/docker-compose/README.md12
25 files changed, 374 insertions, 149 deletions
diff --git a/.github/workflows/feature_branch_deletion.yml b/.github/workflows/feature_branch_deletion.yml
new file mode 100644
index 0000000000..c2a3bdc785
--- /dev/null
+++ b/.github/workflows/feature_branch_deletion.yml
@@ -0,0 +1,24 @@
+---
+name: Feature branch deletion cleanup
+on:
+ delete:
+ branches:
+ - feature_**
+jobs:
+ push:
+ runs-on: ubuntu-latest
+ permissions:
+ packages: write
+ contents: read
+ steps:
+ - name: Delete API Schema
+ env:
+ AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
+ AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+ AWS_REGION: 'us-east-1'
+ run: |
+ ansible localhost -c local -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
+ ansible localhost -c local -m aws_s3 \
+ -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
+
+
diff --git a/.github/workflows/upload_schema.yml b/.github/workflows/upload_schema.yml
index a9a4420f8f..8f258dd7c1 100644
--- a/.github/workflows/upload_schema.yml
+++ b/.github/workflows/upload_schema.yml
@@ -5,6 +5,7 @@ on:
branches:
- devel
- release_**
+ - feature_**
jobs:
push:
runs-on: ubuntu-latest
diff --git a/Makefile b/Makefile
index 65e5aec54b..cb8b86cdac 100644
--- a/Makefile
+++ b/Makefile
@@ -454,6 +454,7 @@ COMPOSE_OPTS ?=
CONTROL_PLANE_NODE_COUNT ?= 1
EXECUTION_NODE_COUNT ?= 2
MINIKUBE_CONTAINER_GROUP ?= false
+MINIKUBE_SETUP ?= false # if false, run minikube separately
EXTRA_SOURCES_ANSIBLE_OPTS ?=
ifneq ($(ADMIN_PASSWORD),)
@@ -462,7 +463,7 @@ endif
docker-compose-sources: .git/hooks/pre-commit
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
- ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
+ ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
fi;
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index cf6fa391e9..c4436424f5 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -2221,6 +2221,15 @@ class InventorySourceUpdateSerializer(InventorySourceSerializer):
class Meta:
fields = ('can_update',)
+ def validate(self, attrs):
+ project = self.instance.source_project
+ if project:
+ failed_reason = project.get_reason_if_failed()
+ if failed_reason:
+ raise serializers.ValidationError(failed_reason)
+
+ return super(InventorySourceUpdateSerializer, self).validate(attrs)
+
class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):
@@ -4272,17 +4281,10 @@ class JobLaunchSerializer(BaseSerializer):
# Basic validation - cannot run a playbook without a playbook
if not template.project:
errors['project'] = _("A project is required to run a job.")
- elif template.project.status in ('error', 'failed'):
- errors['playbook'] = _("Missing a revision to run due to failed project update.")
-
- latest_update = template.project.project_updates.last()
- if latest_update is not None and latest_update.failed:
- failed_validation_tasks = latest_update.project_update_events.filter(
- event='runner_on_failed',
- play="Perform project signature/checksum verification",
- )
- if failed_validation_tasks:
- errors['playbook'] = _("Last project update failed due to signature validation failure.")
+ else:
+ failure_reason = template.project.get_reason_if_failed()
+ if failure_reason:
+ errors['playbook'] = failure_reason
# cannot run a playbook without an inventory
if template.inventory and template.inventory.pending_deletion is True:
diff --git a/awx/api/templates/api/job_template_launch.md b/awx/api/templates/api/job_template_launch.md
index 5fec56ec6c..be5d584cd0 100644
--- a/awx/api/templates/api/job_template_launch.md
+++ b/awx/api/templates/api/job_template_launch.md
@@ -1,5 +1,5 @@
Launch a Job Template:
-
+{% ifmeth GET %}
Make a GET request to this resource to determine if the job_template can be
launched and whether any passwords are required to launch the job_template.
The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
* `inventory_needed_to_start`: Flag indicating the presence of an inventory
associated with the job template. If not then one should be supplied when
launching the job (boolean, read-only)
-
-Make a POST request to this resource to launch the job_template. If any
+{% endifmeth %}
+{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
passwords, inventory, or extra variables (extra_vars) are required, they must
be passed via POST data, with extra_vars given as a YAML or JSON string and
escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
launched, a 405 status code will be returned. If the provided credential or
inventory are not allowed to be used by the user, then a 403 status code will
be returned.
+{% endifmeth %} \ No newline at end of file
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index 14fae507d3..90e52ed883 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -2221,6 +2221,8 @@ class InventorySourceUpdateView(RetrieveAPIView):
def post(self, request, *args, **kwargs):
obj = self.get_object()
+ serializer = self.get_serializer(instance=obj, data=request.data)
+ serializer.is_valid(raise_exception=True)
if obj.can_update:
update = obj.update()
if not update:
diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py
index 54c7d3e2b1..d685ddb4e2 100644
--- a/awx/main/models/inventory.py
+++ b/awx/main/models/inventory.py
@@ -247,6 +247,19 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
return (number, step)
def get_sliced_hosts(self, host_queryset, slice_number, slice_count):
+ """
+ Returns a slice of Hosts given a slice number and total slice count, or
+ the original queryset if slicing is not requested.
+
+ NOTE: If slicing is performed, this will return a List[Host] with the
+ resulting slice. If slicing is not performed it will return the
+ original queryset (not evaluating it or forcing it to a list). This
+ puts the burden on the caller to check the resulting type. This is
+ non-ideal because it's easy to get wrong, but I think the only way
+ around it is to force the queryset which has memory implications for
+ large inventories.
+ """
+
if slice_count > 1 and slice_number > 0:
offset = slice_number - 1
host_queryset = host_queryset[offset::slice_count]
diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py
index a84a5a67eb..cc1a477899 100644
--- a/awx/main/models/jobs.py
+++ b/awx/main/models/jobs.py
@@ -15,6 +15,7 @@ from urllib.parse import urljoin
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
+from django.db.models.query import QuerySet
# from django.core.cache import cache
from django.utils.encoding import smart_str
@@ -844,22 +845,30 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
def get_notification_friendly_name(self):
return "Job"
- def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id']):
+ def _get_inventory_hosts(self, only=('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id'), **filters):
+ """Return value is an iterable for the relevant hosts for this job"""
if not self.inventory:
return []
host_queryset = self.inventory.hosts.only(*only)
- return self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
+ if filters:
+ host_queryset = host_queryset.filter(**filters)
+ host_queryset = self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
+ if isinstance(host_queryset, QuerySet):
+ return host_queryset.iterator()
+ return host_queryset
def start_job_fact_cache(self, destination, modification_times, timeout=None):
self.log_lifecycle("start_job_fact_cache")
os.makedirs(destination, mode=0o700)
- hosts = self._get_inventory_hosts()
+
if timeout is None:
timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
if timeout > 0:
# exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT seconds`
timeout = now() - datetime.timedelta(seconds=timeout)
- hosts = hosts.filter(ansible_facts_modified__gte=timeout)
+ hosts = self._get_inventory_hosts(ansible_facts_modified__gte=timeout)
+ else:
+ hosts = self._get_inventory_hosts()
for host in hosts:
filepath = os.sep.join(map(str, [destination, host.name]))
if not os.path.realpath(filepath).startswith(destination):
diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py
index 5af858fa8d..6577d24c40 100644
--- a/awx/main/models/projects.py
+++ b/awx/main/models/projects.py
@@ -471,6 +471,29 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
def get_absolute_url(self, request=None):
return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)
+ def get_reason_if_failed(self):
+ """
+ If the project is in a failed or errored state, return a human-readable
+ error message explaining why. Otherwise return None.
+
+ This is used during validation in the serializer and also by
+ RunProjectUpdate/RunInventoryUpdate.
+ """
+
+ if self.status not in ('error', 'failed'):
+ return None
+
+ latest_update = self.project_updates.last()
+ if latest_update is not None and latest_update.failed:
+ failed_validation_tasks = latest_update.project_update_events.filter(
+ event='runner_on_failed',
+ play="Perform project signature/checksum verification",
+ )
+ if failed_validation_tasks:
+ return _("Last project update failed due to signature validation failure.")
+
+ return _("Missing a revision to run due to failed project update.")
+
'''
RelatedJobsMixin
'''
diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py
index 30518e0714..17ced60f9f 100644
--- a/awx/main/notifications/webhook_backend.py
+++ b/awx/main/notifications/webhook_backend.py
@@ -5,9 +5,6 @@ import json
import logging
import requests
-from django.utils.encoding import smart_str
-from django.utils.translation import gettext_lazy as _
-
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.utils import get_awx_http_client_headers
from awx.main.notifications.custom_notification_base import CustomNotificationBase
@@ -17,6 +14,8 @@ logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
+ MAX_RETRIES = 5
+
init_parameters = {
"url": {"label": "Target URL", "type": "string"},
"http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},
@@ -64,20 +63,67 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
if self.http_method.lower() not in ['put', 'post']:
raise ValueError("HTTP method must be either 'POST' or 'PUT'.")
chosen_method = getattr(requests, self.http_method.lower(), None)
+
for m in messages:
+
auth = None
if self.username or self.password:
auth = (self.username, self.password)
- r = chosen_method(
- "{}".format(m.recipients()[0]),
- auth=auth,
- data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
- headers=dict(list(get_awx_http_client_headers().items()) + list((self.headers or {}).items())),
- verify=(not self.disable_ssl_verification),
- )
- if r.status_code >= 400:
- logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
+
+ # the constructor for EmailMessage - https://docs.djangoproject.com/en/4.1/_modules/django/core/mail/message will turn an empty dictionary to an empty string
+ # sometimes an empty dict is intentional and we added this conditional to enforce that
+ if not m.body:
+ m.body = {}
+
+ url = str(m.recipients()[0])
+ data = json.dumps(m.body, ensure_ascii=False).encode('utf-8')
+ headers = {**(get_awx_http_client_headers()), **(self.headers or {})}
+
+ err = None
+
+ for retries in range(self.MAX_RETRIES):
+
+ # Sometimes we hit redirect URLs. We must account for this. We still extract the redirect URL from the response headers and try again. Max retries == 5
+ resp = chosen_method(
+ url=url,
+ auth=auth,
+ data=data,
+ headers=headers,
+ verify=(not self.disable_ssl_verification),
+ allow_redirects=False, # override default behaviour for redirects
+ )
+
+ # either success or error reached if this conditional fires
+ if resp.status_code not in [301, 307]:
+ break
+
+ # we've hit a redirect. extract the redirect URL out of the first response header and try again
+ logger.warning(
+ f"Received a {resp.status_code} from {url}, trying to reach redirect url {resp.headers.get('Location', None)}; attempt #{retries+1}"
+ )
+
+ # take the first redirect URL in the response header and try that
+ url = resp.headers.get("Location", None)
+
+ if url is None:
+ err = f"Webhook notification received a redirect to a blank URL (no Location header). Response headers={resp.headers}"
+ break
+ else:
+ # no break condition in the loop encountered; therefore we have hit the maximum number of retries
+ err = f"Webhook notification max number of retries [{self.MAX_RETRIES}] exceeded. Failed to send webhook notification to {url}"
+
+ if resp.status_code >= 400:
+ err = f"Error sending webhook notification: {resp.status_code}"
+
+ # log error message
+ if err:
+ logger.error(err)
if not self.fail_silently:
- raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
- sent_messages += 1
+ raise Exception(err)
+
+ # no errors were encountered therefore we successfully sent off the notification webhook
+ if resp.status_code in range(200, 300):
+ logger.debug(f"Notification webhook successfully sent to {url}. Received {resp.status_code}")
+ sent_messages += 1
+
return sent_messages
diff --git a/awx/main/registrar.py b/awx/main/registrar.py
index 31133f936b..d13f5b6857 100644
--- a/awx/main/registrar.py
+++ b/awx/main/registrar.py
@@ -3,6 +3,8 @@
from django.db.models.signals import pre_save, post_save, pre_delete, m2m_changed
+from taggit.managers import TaggableManager
+
class ActivityStreamRegistrar(object):
def __init__(self):
@@ -19,6 +21,8 @@ class ActivityStreamRegistrar(object):
pre_delete.connect(activity_stream_delete, sender=model, dispatch_uid=str(self.__class__) + str(model) + "_delete")
for m2mfield in model._meta.many_to_many:
+ if isinstance(m2mfield, TaggableManager):
+ continue # Special case for taggit app
try:
m2m_attr = getattr(model, m2mfield.name)
m2m_changed.connect(
diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py
index e131f368a5..c99d9e3027 100644
--- a/awx/main/tasks/callback.py
+++ b/awx/main/tasks/callback.py
@@ -2,8 +2,6 @@ import json
import time
import logging
from collections import deque
-import os
-import stat
# Django
from django.conf import settings
@@ -206,21 +204,6 @@ class RunnerCallback:
self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command), job_cwd=runner_config.cwd, job_env=job_env)
# We opened a connection just for that save, close it here now
connections.close_all()
- elif status_data['status'] == 'failed':
- # For encrypted ssh_key_data, ansible-runner worker will open and write the
- # ssh_key_data to a named pipe. Then, once the podman container starts, ssh-agent will
- # read from this named pipe so that the key can be used in ansible-playbook.
- # Once the podman container exits, the named pipe is deleted.
- # However, if the podman container fails to start in the first place, e.g. the image
- # name is incorrect, then this pipe is not cleaned up. Eventually ansible-runner
- # processor will attempt to write artifacts to the private data dir via unstream_dir, requiring
- # that it open this named pipe. This leads to a hang. Thus, before any artifacts
- # are written by the processor, it's important to remove this ssh_key_data pipe.
- private_data_dir = self.instance.job_env.get('AWX_PRIVATE_DATA_DIR', None)
- if private_data_dir:
- key_data_file = os.path.join(private_data_dir, 'artifacts', str(self.instance.id), 'ssh_key_data')
- if os.path.exists(key_data_file) and stat.S_ISFIFO(os.stat(key_data_file).st_mode):
- os.remove(key_data_file)
elif status_data['status'] == 'error':
result_traceback = status_data.get('result_traceback', None)
if result_traceback:
diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py
index 3295adcc9c..3557c4110c 100644
--- a/awx/main/tasks/jobs.py
+++ b/awx/main/tasks/jobs.py
@@ -767,6 +767,10 @@ class SourceControlMixin(BaseTask):
try:
original_branch = None
+ failed_reason = project.get_reason_if_failed()
+ if failed_reason:
+ self.update_model(self.instance.pk, status='failed', job_explanation=failed_reason)
+ raise RuntimeError(failed_reason)
project_path = project.get_project_path(check_if_exists=False)
if project.scm_type == 'git' and (scm_branch and scm_branch != project.scm_branch):
if os.path.exists(project_path):
@@ -1056,10 +1060,6 @@ class RunJob(SourceControlMixin, BaseTask):
error = _('Job could not start because no Execution Environment could be found.')
self.update_model(job.pk, status='error', job_explanation=error)
raise RuntimeError(error)
- elif job.project.status in ('error', 'failed'):
- msg = _('The project revision for this job template is unknown due to a failed update.')
- job = self.update_model(job.pk, status='failed', job_explanation=msg)
- raise RuntimeError(msg)
if job.inventory.kind == 'smart':
# cache smart inventory memberships so that the host_filter query is not
diff --git a/awx/main/tests/functional/test_notifications.py b/awx/main/tests/functional/test_notifications.py
index ce3873c223..7396b77843 100644
--- a/awx/main/tests/functional/test_notifications.py
+++ b/awx/main/tests/functional/test_notifications.py
@@ -75,6 +75,7 @@ def test_encrypted_subfields(get, post, user, organization):
url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
response = get(url, u)
assert response.data['notification_configuration']['account_token'] == "$encrypted$"
+
with mock.patch.object(notification_template_actual.notification_class, "send_messages", assert_send):
notification_template_actual.send("Test", {'body': "Test"})
@@ -175,3 +176,46 @@ def test_custom_environment_injection(post, user, organization):
fake_send.side_effect = _send_side_effect
template.send('subject', 'message')
+
+
+def mock_post(*args, **kwargs):
+ class MockGoodResponse:
+ def __init__(self):
+ self.status_code = 200
+
+ class MockRedirectResponse:
+ def __init__(self):
+ self.status_code = 301
+ self.headers = {"Location": "http://goodendpoint"}
+
+ if kwargs['url'] == "http://goodendpoint":
+ return MockGoodResponse()
+ else:
+ return MockRedirectResponse()
+
+
+@pytest.mark.django_db
+@mock.patch('requests.post', side_effect=mock_post)
+def test_webhook_notification_pointed_to_a_redirect_launch_endpoint(post, admin, organization):
+
+ n1 = NotificationTemplate.objects.create(
+ name="test-webhook",
+ description="test webhook",
+ organization=organization,
+ notification_type="webhook",
+ notification_configuration=dict(
+ url="http://some.fake.url",
+ disable_ssl_verification=True,
+ http_method="POST",
+ headers={
+ "Content-Type": "application/json",
+ },
+ username=admin.username,
+ password=admin.password,
+ ),
+ messages={
+ "success": {"message": "", "body": "{}"},
+ },
+ )
+
+ assert n1.send("", n1.messages.get("success").get("body")) == 1
diff --git a/awx/main/tests/unit/notifications/test_webhook.py b/awx/main/tests/unit/notifications/test_webhook.py
index db4255fb35..b2c92c59ab 100644
--- a/awx/main/tests/unit/notifications/test_webhook.py
+++ b/awx/main/tests/unit/notifications/test_webhook.py
@@ -27,11 +27,12 @@ def test_send_messages_as_POST():
]
)
requests_mock.post.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=None,
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
verify=True,
+ allow_redirects=False,
)
assert sent_messages == 1
@@ -57,11 +58,12 @@ def test_send_messages_as_PUT():
]
)
requests_mock.put.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=None,
data=json.dumps({'text': 'test body 2'}, ensure_ascii=False).encode('utf-8'),
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
verify=True,
+ allow_redirects=False,
)
assert sent_messages == 1
@@ -87,11 +89,12 @@ def test_send_messages_with_username():
]
)
requests_mock.post.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=('userstring', None),
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
verify=True,
+ allow_redirects=False,
)
assert sent_messages == 1
@@ -117,11 +120,12 @@ def test_send_messages_with_password():
]
)
requests_mock.post.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=(None, 'passwordstring'),
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
verify=True,
+ allow_redirects=False,
)
assert sent_messages == 1
@@ -147,11 +151,12 @@ def test_send_messages_with_username_and_password():
]
)
requests_mock.post.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=('userstring', 'passwordstring'),
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
verify=True,
+ allow_redirects=False,
)
assert sent_messages == 1
@@ -177,11 +182,12 @@ def test_send_messages_with_no_verify_ssl():
]
)
requests_mock.post.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=None,
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
verify=False,
+ allow_redirects=False,
)
assert sent_messages == 1
@@ -207,7 +213,7 @@ def test_send_messages_with_additional_headers():
]
)
requests_mock.post.assert_called_once_with(
- 'http://example.com',
+ url='http://example.com',
auth=None,
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
headers={
@@ -217,5 +223,6 @@ def test_send_messages_with_additional_headers():
'X-Test-Header2': 'test-content-2',
},
verify=True,
+ allow_redirects=False,
)
assert sent_messages == 1
diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py
index af91ee58f2..b45595e6ac 100644
--- a/awx/settings/defaults.py
+++ b/awx/settings/defaults.py
@@ -254,6 +254,14 @@ START_TASK_LIMIT = 100
TASK_MANAGER_TIMEOUT = 300
TASK_MANAGER_TIMEOUT_GRACE_PERIOD = 60
+# Number of seconds _in addition to_ the task manager timeout a job can stay
+# in waiting without being reaped
+JOB_WAITING_GRACE_PERIOD = 60
+
+# Number of seconds after a container group job finished time to wait
+# before the awx_k8s_reaper task will tear down the pods
+K8S_POD_REAPER_GRACE_PERIOD = 60
+
# Disallow sending session cookies over insecure connections
SESSION_COOKIE_SECURE = True
@@ -1004,16 +1012,5 @@ DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true']
# Mount exposed paths as hostPath resource in k8s/ocp
AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False
-# Time out task managers if they take longer than this many seconds
-TASK_MANAGER_TIMEOUT = 300
-
-# Number of seconds _in addition to_ the task manager timeout a job can stay
-# in waiting without being reaped
-JOB_WAITING_GRACE_PERIOD = 60
-
-# Number of seconds after a container group job finished time to wait
-# before the awx_k8s_reaper task will tear down the pods
-K8S_POD_REAPER_GRACE_PERIOD = 60
-
# This is overridden downstream via /etc/tower/conf.d/cluster_host_id.py
CLUSTER_HOST_ID = socket.gethostname()
diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json
index 2b55750efe..ea8733527b 100644
--- a/awx/ui/package-lock.json
+++ b/awx/ui/package-lock.json
@@ -7,9 +7,9 @@
"name": "ui",
"dependencies": {
"@lingui/react": "3.14.0",
- "@patternfly/patternfly": "4.210.2",
- "@patternfly/react-core": "^4.239.0",
- "@patternfly/react-icons": "4.90.0",
+ "@patternfly/patternfly": "4.217.1",
+ "@patternfly/react-core": "^4.250.1",
+ "@patternfly/react-icons": "4.92.10",
"@patternfly/react-table": "4.108.0",
"ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2",
@@ -3747,26 +3747,26 @@
"dev": true
},
"node_modules/@patternfly/patternfly": {
- "version": "4.210.2",
- "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
- "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+ "version": "4.217.1",
+ "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+ "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
},
"node_modules/@patternfly/react-core": {
- "version": "4.239.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
- "integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
+ "version": "4.250.1",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+ "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
"dependencies": {
- "@patternfly/react-icons": "^4.90.0",
- "@patternfly/react-styles": "^4.89.0",
- "@patternfly/react-tokens": "^4.91.0",
+ "@patternfly/react-icons": "^4.92.6",
+ "@patternfly/react-styles": "^4.91.6",
+ "@patternfly/react-tokens": "^4.93.6",
"focus-trap": "6.9.2",
"react-dropzone": "9.0.0",
"tippy.js": "5.1.2",
"tslib": "^2.0.0"
},
"peerDependencies": {
- "react": "^16.8.0 || ^17.0.0",
- "react-dom": "^16.8.0 || ^17.0.0"
+ "react": "^16.8 || ^17 || ^18",
+ "react-dom": "^16.8 || ^17 || ^18"
}
},
"node_modules/@patternfly/react-core/node_modules/tslib": {
@@ -3775,18 +3775,18 @@
"integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
},
"node_modules/@patternfly/react-icons": {
- "version": "4.90.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
- "integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
+ "version": "4.92.10",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+ "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
"peerDependencies": {
- "react": "^16.8.0 || ^17.0.0",
- "react-dom": "^16.8.0 || ^17.0.0"
+ "react": "^16.8 || ^17 || ^18",
+ "react-dom": "^16.8 || ^17 || ^18"
}
},
"node_modules/@patternfly/react-styles": {
- "version": "4.89.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
- "integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
+ "version": "4.91.10",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+ "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
},
"node_modules/@patternfly/react-table": {
"version": "4.108.0",
@@ -3811,9 +3811,9 @@
"integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
},
"node_modules/@patternfly/react-tokens": {
- "version": "4.91.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
- "integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
+ "version": "4.93.10",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+ "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
},
"node_modules/@pmmmwh/react-refresh-webpack-plugin": {
"version": "0.5.4",
@@ -25089,18 +25089,18 @@
"dev": true
},
"@patternfly/patternfly": {
- "version": "4.210.2",
- "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
- "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+ "version": "4.217.1",
+ "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+ "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
},
"@patternfly/react-core": {
- "version": "4.239.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
- "integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
+ "version": "4.250.1",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+ "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
"requires": {
- "@patternfly/react-icons": "^4.90.0",
- "@patternfly/react-styles": "^4.89.0",
- "@patternfly/react-tokens": "^4.91.0",
+ "@patternfly/react-icons": "^4.92.6",
+ "@patternfly/react-styles": "^4.91.6",
+ "@patternfly/react-tokens": "^4.93.6",
"focus-trap": "6.9.2",
"react-dropzone": "9.0.0",
"tippy.js": "5.1.2",
@@ -25115,15 +25115,15 @@
}
},
"@patternfly/react-icons": {
- "version": "4.90.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
- "integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
+ "version": "4.92.10",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+ "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
"requires": {}
},
"@patternfly/react-styles": {
- "version": "4.89.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
- "integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
+ "version": "4.91.10",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+ "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
},
"@patternfly/react-table": {
"version": "4.108.0",
@@ -25146,9 +25146,9 @@
}
},
"@patternfly/react-tokens": {
- "version": "4.91.0",
- "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
- "integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
+ "version": "4.93.10",
+ "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+ "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
},
"@pmmmwh/react-refresh-webpack-plugin": {
"version": "0.5.4",
diff --git a/awx/ui/package.json b/awx/ui/package.json
index 8f5f4b0fba..0c117173df 100644
--- a/awx/ui/package.json
+++ b/awx/ui/package.json
@@ -7,9 +7,9 @@
},
"dependencies": {
"@lingui/react": "3.14.0",
- "@patternfly/patternfly": "4.210.2",
- "@patternfly/react-core": "^4.239.0",
- "@patternfly/react-icons": "4.90.0",
+ "@patternfly/patternfly": "4.217.1",
+ "@patternfly/react-core": "^4.250.1",
+ "@patternfly/react-icons": "4.92.10",
"@patternfly/react-table": "4.108.0",
"ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2",
diff --git a/awx/ui/src/screens/Job/JobOutput/HostEventModal.js b/awx/ui/src/screens/Job/JobOutput/HostEventModal.js
index 57fe7ce05f..a7295c1692 100644
--- a/awx/ui/src/screens/Job/JobOutput/HostEventModal.js
+++ b/awx/ui/src/screens/Job/JobOutput/HostEventModal.js
@@ -70,7 +70,6 @@ const getStdOutValue = (hostEvent) => {
function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
const [hostStatus, setHostStatus] = useState(null);
const [activeTabKey, setActiveTabKey] = useState(0);
-
useEffect(() => {
setHostStatus(processEventStatus(hostEvent));
}, [setHostStatus, hostEvent]);
@@ -108,11 +107,11 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
style={{ alignItems: 'center', marginTop: '20px' }}
gutter="sm"
>
- <Detail label={t`Host`} value={hostEvent.host_name} />
- {hostEvent.summary_fields.host?.description ? (
+ <Detail label={t`Host`} value={hostEvent.event_data?.host} />
+ {hostEvent.summary_fields?.host?.description ? (
<Detail
label={t`Description`}
- value={hostEvent.summary_fields.host.description}
+ value={hostEvent.summary_fields?.host?.description}
/>
) : null}
{hostStatus ? (
@@ -125,12 +124,9 @@ function HostEventModal({ onClose, hostEvent = {}, isOpen = false }) {
<Detail label={t`Task`} value={hostEvent.task} />
<Detail
label={t`Module`}
- value={hostEvent.event_data.task_action || t`No result found`}
- />
- <Detail
- label={t`Command`}
- value={hostEvent?.event_data?.res?.cmd}
+ value={hostEvent.event_data?.task_action || t`No result found`}
/>
+ <Detail label={t`Command`} value={hostEvent.event_data?.res?.cmd} />
</DetailList>
</Tab>
<Tab
diff --git a/awx/ui/src/screens/Job/JobOutput/HostEventModal.test.js b/awx/ui/src/screens/Job/JobOutput/HostEventModal.test.js
index 96866c4b03..0b877b4e4c 100644
--- a/awx/ui/src/screens/Job/JobOutput/HostEventModal.test.js
+++ b/awx/ui/src/screens/Job/JobOutput/HostEventModal.test.js
@@ -52,6 +52,47 @@ const hostEvent = {
},
};
+const partialHostEvent = {
+ changed: true,
+ event: 'runner_on_ok',
+ event_data: {
+ host: 'foo',
+ play: 'all',
+ playbook: 'run_command.yml',
+ res: {
+ ansible_loop_var: 'item',
+ changed: true,
+ item: '1',
+ msg: 'This is a debug message: 1',
+ stdout:
+ ' total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023',
+ stderr: 'problems',
+ cmd: ['free', '-m'],
+ stderr_lines: [],
+ stdout_lines: [
+ ' total used free shared buff/cache available',
+ 'Mem: 7973 3005 960 30 4007 4582',
+ 'Swap: 1023 0 1023',
+ ],
+ },
+ task: 'command',
+ task_action: 'command',
+ },
+ event_display: 'Host OK',
+ event_level: 3,
+ failed: false,
+ host: 1,
+ id: 123,
+ job: 4,
+ play: 'all',
+ playbook: 'run_command.yml',
+ stdout: `stdout: "changed: [localhost] => {"changed": true, "cmd": ["free", "-m"], "delta": "0:00:01.479609", "end": "2019-09-10 14:21:45.469533", "rc": 0, "start": "2019-09-10 14:21:43.989924", "stderr": "", "stderr_lines": [], "stdout": " total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023", "stdout_lines": [" total used free shared buff/cache available", "Mem: 7973 3005 960 30 4007 4582", "Swap: 1023 0 1023"]}"
+ `,
+ task: 'command',
+ type: 'job_event',
+ url: '/api/v2/job_events/123/',
+};
+
/*
Some libraries return a list of string in stdout
Example: https://github.com/ansible-collections/cisco.ios/blob/main/plugins/modules/ios_command.py#L124-L128
@@ -134,6 +175,13 @@ describe('HostEventModal', () => {
expect(wrapper).toHaveLength(1);
});
+ test('renders successfully with partial data', () => {
+ const wrapper = shallow(
+ <HostEventModal hostEvent={partialHostEvent} onClose={() => {}} />
+ );
+ expect(wrapper).toHaveLength(1);
+ });
+
test('should render all tabs', () => {
const wrapper = shallow(
<HostEventModal hostEvent={hostEvent} onClose={() => {}} isOpen />
diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py
index cd600d9dc4..2fcc62648a 100644
--- a/awxkit/awxkit/api/pages/api.py
+++ b/awxkit/awxkit/api/pages/api.py
@@ -275,7 +275,13 @@ class ApiV2(base.Base):
# When creating a project, we need to wait for its
# first project update to finish so that associated
# JTs have valid options for playbook names
- _page.wait_until_completed()
+ try:
+ _page.wait_until_completed(timeout=300)
+ except AssertionError:
+ # If the project update times out, try to
+ # carry on in the hopes that it will
+ # finish before it is needed.
+ pass
else:
# If we are an existing project and our scm_tpye is not changing don't try and import the local_path setting
if asset['natural_key']['type'] == 'project' and 'local_path' in post_data and _page['scm_type'] == post_data['scm_type']:
diff --git a/awxkit/awxkit/api/pages/schedules.py b/awxkit/awxkit/api/pages/schedules.py
index 3ff9e1c0bb..1fa34c81e6 100644
--- a/awxkit/awxkit/api/pages/schedules.py
+++ b/awxkit/awxkit/api/pages/schedules.py
@@ -1,6 +1,7 @@
from contextlib import suppress
-from awxkit.api.pages import SystemJobTemplate
+from awxkit.api.pages import JobTemplate, SystemJobTemplate, Project, InventorySource
+from awxkit.api.pages.workflow_job_templates import WorkflowJobTemplate
from awxkit.api.mixins import HasCreate
from awxkit.api.resources import resources
from awxkit.config import config
@@ -11,7 +12,7 @@ from . import base
class Schedule(HasCreate, base.Base):
- dependencies = [SystemJobTemplate]
+ dependencies = [JobTemplate, SystemJobTemplate, Project, InventorySource, WorkflowJobTemplate]
NATURAL_KEY = ('unified_job_template', 'name')
def silent_delete(self):
diff --git a/tools/docker-compose-minikube/minikube/defaults/main.yml b/tools/docker-compose-minikube/minikube/defaults/main.yml
index b61ada8d34..a0cb7bdf03 100644
--- a/tools/docker-compose-minikube/minikube/defaults/main.yml
+++ b/tools/docker-compose-minikube/minikube/defaults/main.yml
@@ -9,8 +9,8 @@ addons:
minikube_url_linux: 'https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64'
minikube_url_macos: 'https://storage.googleapis.com/minikube/releases/latest/minikube-darwin-amd64'
-kubectl_url_linux: 'https://dl.k8s.io/release/v1.21.0/bin/linux/amd64/kubectl'
-kubectl_url_macos: 'https://dl.k8s.io/release/v1.21.0/bin/darwin/amd64/kubectl'
+kubectl_url_linux: 'https://dl.k8s.io/release/v1.25.0/bin/linux/amd64/kubectl'
+kubectl_url_macos: 'https://dl.k8s.io/release/v1.25.0/bin/darwin/amd64/kubectl'
# Service Account Name
minikube_service_account_name: 'awx-devel'
diff --git a/tools/docker-compose-minikube/minikube/tasks/main.yml b/tools/docker-compose-minikube/minikube/tasks/main.yml
index 9ddef11167..0cf9c841a9 100644
--- a/tools/docker-compose-minikube/minikube/tasks/main.yml
+++ b/tools/docker-compose-minikube/minikube/tasks/main.yml
@@ -8,6 +8,10 @@
state: 'directory'
mode: '0700'
+- name: debug minikube_setup
+ debug:
+ var: minikube_setup
+
# Linux block
- block:
- name: Download Minikube
@@ -24,6 +28,7 @@
when:
- ansible_architecture == "x86_64"
- ansible_system == "Linux"
+ - minikube_setup | default(False) | bool
# MacOS block
- block:
@@ -41,25 +46,29 @@
when:
- ansible_architecture == "x86_64"
- ansible_system == "Darwin"
+ - minikube_setup | default(False) | bool
-- name: Starting Minikube
- shell: "{{ sources_dest }}/minikube start --driver={{ driver }} --install-addons=true --addons={{ addons | join(',') }}"
- register: minikube_stdout
+- block:
+ - name: Starting Minikube
+ shell: "{{ sources_dest }}/minikube start --driver={{ driver }} --install-addons=true --addons={{ addons | join(',') }}"
+ register: minikube_stdout
-- name: Enable Ingress Controller on Minikube
- shell: "{{ sources_dest }}/minikube addons enable ingress"
- when:
- - minikube_stdout.rc == 0
- register: _minikube_ingress
- ignore_errors: true
+ - name: Enable Ingress Controller on Minikube
+ shell: "{{ sources_dest }}/minikube addons enable ingress"
+ when:
+ - minikube_stdout.rc == 0
+ register: _minikube_ingress
+ ignore_errors: true
-- name: Show Minikube Ingress known-issue 7332 warning
- pause:
- seconds: 5
- prompt: "The Minikube Ingress addon has been disabled since it looks like you are hitting https://github.com/kubernetes/minikube/issues/7332"
+ - name: Show Minikube Ingress known-issue 7332 warning
+ pause:
+ seconds: 5
+ prompt: "The Minikube Ingress addon has been disabled since it looks like you are hitting https://github.com/kubernetes/minikube/issues/7332"
+ when:
+ - '"minikube/issues/7332" in _minikube_ingress.stderr'
+ - ansible_system == "Darwin"
when:
- - '"minikube/issues/7332" in _minikube_ingress.stderr'
- - ansible_system == "Darwin"
+ - minikube_setup | default(False) | bool
- name: Create ServiceAccount and clusterRoleBinding
k8s:
diff --git a/tools/docker-compose/README.md b/tools/docker-compose/README.md
index 530b4dc3e5..dd66c41450 100644
--- a/tools/docker-compose/README.md
+++ b/tools/docker-compose/README.md
@@ -28,7 +28,7 @@ Here are the main `make` targets:
Notable files:
- `tools/docker-compose/inventory` file - used to configure the AWX development environment.
-- `migrate.yml` - playbook for migrating data from Local Docker to the Development Environment
+- `tools/docker-compose/ansible/migrate.yml` - playbook for migrating data from Local Docker to the Development Environment
### Prerequisites
@@ -301,11 +301,19 @@ Note that you may see multiple messages of the form `2021-03-04 20:11:47,666 WAR
To bring up a 1 node AWX + minikube that is accessible from AWX run the following.
+Start minikube
+
+```bash
+(host)$ minikube start --cpus=4 --memory=8g --addons=ingress
+```
+
+Start AWX
+
```bash
(host)$ make docker-compose-container-group
```
-Alternatively, you can set the env var `MINIKUBE_CONTAINER_GROUP=true` to use the default dev env bring up. his way you can use other env flags like the cluster node count.
+Alternatively, you can set the env var `MINIKUBE_CONTAINER_GROUP=true` to use the default dev env bring up. This way you can use other env flags like the cluster node count. Set `MINIKUBE_SETUP=true` to make the roles download, install and run minikube for you, but if you run into issues with this just start minikube yourself.
```bash
(host)$ MINIKUBE_CONTAINER_GROUP=true make docker-compose