# (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ast
import base64
import datetime
import json
import os
import shlex
import zipfile
import re
import pkgutil
from io import BytesIO

from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
from ansible.executor.powershell import module_manifest as ps_manifest
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
from ansible.plugins.loader import module_utils_loader
from ansible.utils.collection_loader._collection_finder import _get_collection_metadata

# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
from ansible.executor import action_write_locks

from ansible.utils.display import Display


try:
    import importlib.util
    import importlib.machinery
    imp = None
except ImportError:
    import imp

# if we're on a Python that doesn't have FileNotFoundError, redefine it as IOError (since that's what we'll see)
try:
    FileNotFoundError
except NameError:
    FileNotFoundError = IOError

display = Display()

REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_VERSION = b"\"<<ANSIBLE_VERSION>>\""
REPLACER_COMPLEX = b"\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
REPLACER_WINDOWS = b"# POWERSHELL_COMMON"
REPLACER_JSONARGS = b"<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
REPLACER_SELINUX = b"<<SELINUX_SPECIAL_FILESYSTEMS>>"

# We could end up writing out parameters with unicode characters so we need to
# specify an encoding for the python source file
ENCODING_STRING = u'# -*- coding: utf-8 -*-'
b_ENCODING_STRING = b'# -*- coding: utf-8 -*-'

# module_common is relative to module_utils, so fix the path
_MODULE_UTILS_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils')

# ******************************************************************************

ANSIBALLZ_TEMPLATE = u'''%(shebang)s
%(coding)s
_ANSIBALLZ_WRAPPER = True # For test-module.py script to tell that this is an ANSIBALLZ_WRAPPER
# This code is part of Ansible, but is an independent component.
# The code in this particular templatable string, and this templatable string
# only, is BSD licensed.  Modules which end up using this snippet, which is
# dynamically combined together by Ansible still belong to the author of the
# module, and they may assign their own license to the complete work.
#
# Copyright (c), James Cammarata, 2016
# Copyright (c), Toshio Kuratomi, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def _ansiballz_main():
%(rlimit)s
    import os
    import os.path
    import sys
    import __main__

    # For some distros and python versions we pick up this script in the temporary
    # directory.  This leads to problems when the ansible module masks a python
    # library that another import needs.  We have not figured out what about the
    # specific distros and python versions causes this to behave differently.
    #
    # Tested distros:
    # Fedora23 with python3.4  Works
    # Ubuntu15.10 with python2.7  Works
    # Ubuntu15.10 with python3.4  Fails without this
    # Ubuntu16.04.1 with python3.5  Fails without this
    # To test on another platform:
    # * use the copy module (since this shadows the stdlib copy module)
    # * Turn off pipelining
    # * Make sure that the destination file does not exist
    # * ansible ubuntu16-test -m copy -a 'src=/etc/motd dest=/var/tmp/m'
    # This will traceback in shutil.  Looking at the complete traceback will show
    # that shutil is importing copy which finds the ansible module instead of the
    # stdlib module
    scriptdir = None
    try:
        scriptdir = os.path.dirname(os.path.realpath(__main__.__file__))
    except (AttributeError, OSError):
        # Some platforms don't set __file__ when reading from stdin
        # OSX raises OSError if using abspath() in a directory we don't have
        # permission to read (realpath calls abspath)
        pass
    if scriptdir is not None:
        sys.path = [p for p in sys.path if p != scriptdir]

    import base64
    import runpy
    import shutil
    import tempfile
    import zipfile

    if sys.version_info < (3,):
        PY3 = False
    else:
        PY3 = True

    ZIPDATA = """%(zipdata)s"""

    # Note: temp_path isn't needed once we switch to zipimport
    def invoke_module(modlib_path, temp_path, json_params):
        # When installed via setuptools (including python setup.py install),
        # ansible may be installed with an easy-install.pth file.  That file
        # may load the system-wide install of ansible rather than the one in
        # the module.  sitecustomize is the only way to override that setting.
        z = zipfile.ZipFile(modlib_path, mode='a')

        # py3: modlib_path will be text, py2: it's bytes.  Need bytes at the end
        sitecustomize = u'import sys\\nsys.path.insert(0,"%%s")\\n' %%  modlib_path
        sitecustomize = sitecustomize.encode('utf-8')
        # Use a ZipInfo to work around zipfile limitation on hosts with
        # clocks set to a pre-1980 year (for instance, Raspberry Pi)
        zinfo = zipfile.ZipInfo()
        zinfo.filename = 'sitecustomize.py'
        zinfo.date_time = ( %(year)i, %(month)i, %(day)i, %(hour)i, %(minute)i, %(second)i)
        z.writestr(zinfo, sitecustomize)
        z.close()

        # Put the zipped up module_utils we got from the controller first in the python path so that we
        # can monkeypatch the right basic
        sys.path.insert(0, modlib_path)

        # Monkeypatch the parameters into basic
        from ansible.module_utils import basic
        basic._ANSIBLE_ARGS = json_params
%(coverage)s
        # Run the module!  By importing it as '__main__', it thinks it is executing as a script
        runpy.run_module(mod_name='%(module_fqn)s', init_globals=None, run_name='__main__', alter_sys=True)

        # Ansible modules must exit themselves
        print('{"msg": "New-style module did not handle its own exit", "failed": true}')
        sys.exit(1)

    def debug(command, zipped_mod, json_params):
        # The code here normally doesn't run.  It's only used for debugging on the
        # remote machine.
        #
        # The subcommands in this function make it easier to debug ansiballz
        # modules.  Here are the basic steps:
        #
        # Run ansible with the environment variable: ANSIBLE_KEEP_REMOTE_FILES=1 and -vvv
        # to save the module file remotely::
        #   $ ANSIBLE_KEEP_REMOTE_FILES=1 ansible host1 -m ping -a 'data=october' -vvv
        #
        # Part of the verbose output will tell you where on the remote machine the
        # module was written to::
        #   [...]
        #   <host1> SSH: EXEC ssh -C -q -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o
        #   PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o ConnectTimeout=10 -o
        #   ControlPath=/home/badger/.ansible/cp/ansible-ssh-%%h-%%p-%%r -tt rhel7 '/bin/sh -c '"'"'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8
        #   LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping'"'"''
        #   [...]
        #
        # Log in to the remote machine and run the module file from the previous
        # step with the explode subcommand to extract the module payload into
        # source files::
        #   $ ssh host1
        #   $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping explode
        #   Module expanded into:
        #   /home/badger/.ansible/tmp/ansible-tmp-1461173408.08-279692652635227/ansible
        #
        # You can now edit the source files to instrument the code or experiment with
        # different parameter values.  When you're ready to run the code you've modified
        # (instead of the code from the actual zipped module), use the execute subcommand like this::
        #   $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping execute

        # Okay to use __file__ here because we're running from a kept file
        basedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'debug_dir')
        args_path = os.path.join(basedir, 'args')

        if command == 'excommunicate':
            print('The excommunicate debug command is deprecated and will be removed in 2.11.  Use execute instead.')
            command = 'execute'

        if command == 'explode':
            # transform the ZIPDATA into an exploded directory of code and then
            # print the path to the code.  This is an easy way for people to look
            # at the code on the remote machine for debugging it in that
            # environment
            z = zipfile.ZipFile(zipped_mod)
            for filename in z.namelist():
                if filename.startswith('/'):
                    raise Exception('Something wrong with this module zip file: should not contain absolute paths')

                dest_filename = os.path.join(basedir, filename)
                if dest_filename.endswith(os.path.sep) and not os.path.exists(dest_filename):
                    os.makedirs(dest_filename)
                else:
                    directory = os.path.dirname(dest_filename)
                    if not os.path.exists(directory):
                        os.makedirs(directory)
                    f = open(dest_filename, 'wb')
                    f.write(z.read(filename))
                    f.close()

            # write the args file
            f = open(args_path, 'wb')
            f.write(json_params)
            f.close()

            print('Module expanded into:')
            print('%%s' %% basedir)
            exitcode = 0

        elif command == 'execute':
            # Execute the exploded code instead of executing the module from the
            # embedded ZIPDATA.  This allows people to easily run their modified
            # code on the remote machine to see how changes will affect it.

            # Set pythonpath to the debug dir
            sys.path.insert(0, basedir)

            # read in the args file which the user may have modified
            with open(args_path, 'rb') as f:
                json_params = f.read()

            # Monkeypatch the parameters into basic
            from ansible.module_utils import basic
            basic._ANSIBLE_ARGS = json_params

            # Run the module!  By importing it as '__main__', it thinks it is executing as a script
            runpy.run_module(mod_name='%(module_fqn)s', init_globals=None, run_name='__main__', alter_sys=True)

            # Ansible modules must exit themselves
            print('{"msg": "New-style module did not handle its own exit", "failed": true}')
            sys.exit(1)

        else:
            print('WARNING: Unknown debug command.  Doing nothing.')
            exitcode = 0

        return exitcode

    #
    # See comments in the debug() method for information on debugging
    #

    ANSIBALLZ_PARAMS = %(params)s
    if PY3:
        ANSIBALLZ_PARAMS = ANSIBALLZ_PARAMS.encode('utf-8')
    try:
        # There's a race condition with the controller removing the
        # remote_tmpdir and this module executing under async.  So we cannot
        # store this in remote_tmpdir (use system tempdir instead)
        # Only need to use [ansible_module]_payload_ in the temp_path until we move to zipimport
        # (this helps ansible-test produce coverage stats)
        temp_path = tempfile.mkdtemp(prefix='ansible_%(ansible_module)s_payload_')

        zipped_mod = os.path.join(temp_path, 'ansible_%(ansible_module)s_payload.zip')
        with open(zipped_mod, 'wb') as modlib:
            modlib.write(base64.b64decode(ZIPDATA))

        if len(sys.argv) == 2:
            exitcode = debug(sys.argv[1], zipped_mod, ANSIBALLZ_PARAMS)
        else:
            # Note: temp_path isn't needed once we switch to zipimport
            invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
    finally:
        try:
            shutil.rmtree(temp_path)
        except (NameError, OSError):
            # tempdir creation probably failed
            pass
    sys.exit(exitcode)

if __name__ == '__main__':
    _ansiballz_main()
'''
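
# The wrapper above is rendered with old-style %-formatting; the substitution keys it
# references (shebang, coding, rlimit, coverage, zipdata, params, module_fqn,
# ansible_module, and the date fields) are filled in when the payload is assembled in
# _find_module_utils() below.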

ANSIBALLZ_COVERAGE_TEMPLATE = '''
        # Access to the working directory is required by coverage.
        # Some platforms, such as macOS, may not allow querying the working directory when using become to drop privileges.
        try:
            os.getcwd()
        except OSError:
            os.chdir('/')

        os.environ['COVERAGE_FILE'] = '%(coverage_output)s'

        import atexit

        try:
            import coverage
        except ImportError:
            print('{"msg": "Could not import `coverage` module.", "failed": true}')
            sys.exit(1)

        cov = coverage.Coverage(config_file='%(coverage_config)s')

        def atexit_coverage():
            cov.stop()
            cov.save()

        atexit.register(atexit_coverage)

        cov.start()
'''

ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = '''
        try:
            if PY3:
                import importlib.util
                if importlib.util.find_spec('coverage') is None:
                    raise ImportError
            else:
                import imp
                imp.find_module('coverage')
        except ImportError:
            print('{"msg": "Could not find `coverage` module.", "failed": true}')
            sys.exit(1)
'''

ANSIBALLZ_RLIMIT_TEMPLATE = '''
    import resource

    existing_soft, existing_hard = resource.getrlimit(resource.RLIMIT_NOFILE)

    # adjust soft limit subject to existing hard limit
    requested_soft = min(existing_hard, %(rlimit_nofile)d)

    if requested_soft != existing_soft:
        try:
            resource.setrlimit(resource.RLIMIT_NOFILE, (requested_soft, existing_hard))
        except ValueError:
            # some platforms (eg macOS) lie about their hard limit
            pass
'''


def _strip_comments(source):
    # Strip comments and blank lines from the wrapper
    buf = []
    for line in source.splitlines():
        l = line.strip()
        if not l or l.startswith(u'#'):
            continue
        buf.append(line)
    return u'\n'.join(buf)
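
# Example (illustrative): _strip_comments(u'# note\n\nx = 1') returns u'x = 1';
# full-line comments and blank lines are dropped, while trailing comments on code lines are kept.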


if C.DEFAULT_KEEP_REMOTE_FILES:
    # Keep comments when KEEP_REMOTE_FILES is set.  That way users will see
    # the comments with some nice usage instructions
    ACTIVE_ANSIBALLZ_TEMPLATE = ANSIBALLZ_TEMPLATE
else:
    # ANSIBALLZ_TEMPLATE stripped of comments for smaller over the wire size
    ACTIVE_ANSIBALLZ_TEMPLATE = _strip_comments(ANSIBALLZ_TEMPLATE)

# dirname(dirname(dirname(site-packages/ansible/executor/module_common.py))) == site-packages
# Do this instead of getting site-packages from distutils.sysconfig so we work when we
# haven't been installed
site_packages = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
CORE_LIBRARY_PATH_RE = re.compile(r'%s/(?P<path>ansible/modules/.*)\.(py|ps1)$' % site_packages)
COLLECTION_PATH_RE = re.compile(r'/(?P<path>ansible_collections/[^/]+/[^/]+/plugins/modules/.*)\.(py|ps1)$')

# Detect new-style Python modules by looking for required imports:
# import ansible_collections.[my_ns.my_col.plugins.module_utils.my_module_util]
# from ansible_collections.[my_ns.my_col.plugins.module_utils import my_module_util]
# import ansible.module_utils[.basic]
# from ansible.module_utils[ import basic]
# from ansible.module_utils[.basic import AnsibleModule]
# from ..module_utils[ import basic]
# from ..module_utils[.basic import AnsibleModule]
NEW_STYLE_PYTHON_MODULE_RE = re.compile(
    # Relative imports
    br'(?:from +\.{2,} *module_utils.* +import |'
    # Collection absolute imports:
    br'from +ansible_collections\.[^.]+\.[^.]+\.plugins\.module_utils.* +import |'
    br'import +ansible_collections\.[^.]+\.[^.]+\.plugins\.module_utils.*|'
    # Core absolute imports
    br'from +ansible\.module_utils.* +import |'
    br'import +ansible\.module_utils\.)'
)
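
# For example, each of these concrete import lines is detected as new-style (illustrative):
#   from ansible.module_utils.basic import AnsibleModule
#   import ansible.module_utils.basic
#   from ..module_utils.basic import AnsibleModule
#   from ansible_collections.my_ns.my_col.plugins.module_utils.foo import bar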


class ModuleDepFinder(ast.NodeVisitor):

    def __init__(self, module_fqn, *args, **kwargs):
        """
        Walk the ast tree for the python module.
        :arg module_fqn: The fully qualified name to reach this module in dotted notation.
            example: ansible.module_utils.basic

        Save submodule[.submoduleN][.identifier] into self.submodules
        when they are from ansible.module_utils or ansible_collections packages

        self.submodules will end up with tuples like:
          - ('ansible', 'module_utils', 'basic',)
          - ('ansible', 'module_utils', 'urls', 'fetch_url')
          - ('ansible', 'module_utils', 'database', 'postgres')
          - ('ansible', 'module_utils', 'database', 'postgres', 'quote')
          - ('ansible', 'module_utils', 'database', 'postgres', 'quote')
          - ('ansible_collections', 'my_ns', 'my_col', 'plugins', 'module_utils', 'foo')

        It's up to calling code to determine whether the final element of the
        tuple is a module name or something else (a function, class, or variable name).
        .. seealso:: :python3:class:`ast.NodeVisitor`
        """
        super(ModuleDepFinder, self).__init__(*args, **kwargs)
        self.submodules = set()
        self.module_fqn = module_fqn

    def visit_Import(self, node):
        """
        Handle import ansible.module_utils.MODLIB[.MODLIBn] [as asname]

        We save these as interesting submodules when the imported library is in ansible.module_utils
        or ansible_collections
        """
        for alias in node.names:
            if (alias.name.startswith('ansible.module_utils.') or
                    alias.name.startswith('ansible_collections.')):
                py_mod = tuple(alias.name.split('.'))
                self.submodules.add(py_mod)
        self.generic_visit(node)

    def visit_ImportFrom(self, node):
        """
        Handle from ansible.module_utils.MODLIB import [.MODLIBn] [as asname]

        Also has to handle relative imports

        We save these as interesting submodules when the imported library is in ansible.module_utils
        or ansible_collections
        """

        # FIXME: These should all get skipped:
        # from ansible.executor import module_common
        # from ...executor import module_common
        # from ... import executor (Currently it gives a non-helpful error)
        if node.level > 0:
            if self.module_fqn:
                parts = tuple(self.module_fqn.split('.'))
                if node.module:
                    # relative import: from .module import x
                    node_module = '.'.join(parts[:-node.level] + (node.module,))
                else:
                    # relative import: from . import x
                    node_module = '.'.join(parts[:-node.level])
            else:
                # fall back to an absolute import
                node_module = node.module
        else:
            # absolute import: from module import x
            node_module = node.module

        # Special case: six needs dedicated handling because of its
        # import logic
        py_mod = None
        if node.names[0].name == '_six':
            self.submodules.add(('_six',))
        elif node_module.startswith('ansible.module_utils'):
            # from ansible.module_utils.MODULE1[.MODULEn] import IDENTIFIER [as asname]
            # from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [as asname]
            # from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [,IDENTIFIER] [as asname]
            # from ansible.module_utils import MODULE1 [,MODULEn] [as asname]
            py_mod = tuple(node_module.split('.'))

        elif node_module.startswith('ansible_collections.'):
            if node_module.endswith('plugins.module_utils') or '.plugins.module_utils.' in node_module:
                # from ansible_collections.ns.coll.plugins.module_utils import MODULE [as aname] [,MODULE2] [as aname]
                # from ansible_collections.ns.coll.plugins.module_utils.MODULE import IDENTIFIER [as aname]
                # FIXME: Unhandled cornercase (needs to be ignored):
                # from ansible_collections.ns.coll.plugins.[!module_utils].[FOO].plugins.module_utils import IDENTIFIER
                py_mod = tuple(node_module.split('.'))
            else:
                # Not from module_utils so ignore.  for instance:
                # from ansible_collections.ns.coll.plugins.lookup import IDENTIFIER
                pass

        if py_mod:
            for alias in node.names:
                self.submodules.add(py_mod + (alias.name,))

        self.generic_visit(node)
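
# Example (illustrative): for module source containing
#     from ansible.module_utils.basic import AnsibleModule
#     import ansible.module_utils.facts
# a ModuleDepFinder visit leaves self.submodules holding
#     ('ansible', 'module_utils', 'basic', 'AnsibleModule')
#     ('ansible', 'module_utils', 'facts')
# recursive_finder() below then works out which trailing elements are real modules.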


def _slurp(path):
    if not os.path.exists(path):
        raise AnsibleError("imported module support code does not exist at %s" % os.path.abspath(path))
    with open(path, 'rb') as fd:
        data = fd.read()
    return data


def _get_shebang(interpreter, task_vars, templar, args=tuple()):
    """
    Note: not a stellar API:
       Returns None instead of always returning a shebang line.  Doing it this
       way allows the caller to decide to use the shebang it read from the
       file rather than trust that we reformatted what they already have
       correctly.
    """
    interpreter_name = os.path.basename(interpreter).strip()

    # FUTURE: add logical equivalence for python3 in the case of py3-only modules

    # check for first-class interpreter config
    interpreter_config_key = "INTERPRETER_%s" % interpreter_name.upper()

    if C.config.get_configuration_definitions().get(interpreter_config_key):
        # a config def exists for this interpreter type; consult config for the value
        interpreter_out = C.config.get_config_value(interpreter_config_key, variables=task_vars)
        discovered_interpreter_config = u'discovered_interpreter_%s' % interpreter_name

        interpreter_out = templar.template(interpreter_out.strip())

        facts_from_task_vars = task_vars.get('ansible_facts', {})

        # handle interpreter discovery if requested
        if interpreter_out in ['auto', 'auto_legacy', 'auto_silent', 'auto_legacy_silent']:
            if discovered_interpreter_config not in facts_from_task_vars:
                # interpreter discovery is desired, but has not been run for this host
                raise InterpreterDiscoveryRequiredError("interpreter discovery needed",
                                                        interpreter_name=interpreter_name,
                                                        discovery_mode=interpreter_out)
            else:
                interpreter_out = facts_from_task_vars[discovered_interpreter_config]
    else:
        # a config def does not exist for this interpreter type; consult vars for a possible direct override
        interpreter_config = u'ansible_%s_interpreter' % interpreter_name

        if interpreter_config not in task_vars:
            return None, interpreter

        interpreter_out = templar.template(task_vars[interpreter_config].strip())

    shebang = u'#!' + interpreter_out

    if args:
        shebang = shebang + u' ' + u' '.join(args)

    return shebang, interpreter_out
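
# Example (illustrative, assuming no first-class INTERPRETER_* config definition exists for perl):
# _get_shebang(u'/usr/bin/perl', {'ansible_perl_interpreter': u'/usr/local/bin/perl'}, templar)
# returns (u'#!/usr/local/bin/perl', u'/usr/local/bin/perl'), while with no such var set it
# returns (None, u'/usr/bin/perl') so the caller can keep the shebang already in the module.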


class ModuleInfo:
    def __init__(self, name, paths):
        self.py_src = False
        self.pkg_dir = False
        path = None

        if imp is None:
            # don't pretend this is a top-level module, prefix the rest of the namespace
            self._info = info = importlib.machinery.PathFinder.find_spec('ansible.module_utils.' + name, paths)
            if info is not None:
                self.py_src = os.path.splitext(info.origin)[1] in importlib.machinery.SOURCE_SUFFIXES
                self.pkg_dir = info.origin.endswith('/__init__.py')
                path = info.origin
            else:
                raise ImportError("No module named '%s'" % name)
        else:
            self._info = info = imp.find_module(name, paths)
            self.py_src = info[2][2] == imp.PY_SOURCE
            self.pkg_dir = info[2][2] == imp.PKG_DIRECTORY
            if self.pkg_dir:
                path = os.path.join(info[1], '__init__.py')
            else:
                path = info[1]

        self.path = path

    def get_source(self):
        if imp and self.py_src:
            try:
                return self._info[0].read()
            finally:
                self._info[0].close()
        return _slurp(self.path)

    def __repr__(self):
        return 'ModuleInfo: py_src=%s, pkg_dir=%s, path=%s' % (self.py_src, self.pkg_dir, self.path)


class CollectionModuleInfo(ModuleInfo):
    def __init__(self, name, pkg):
        self._mod_name = name
        self.py_src = True
        self.pkg_dir = False

        split_name = pkg.split('.')
        split_name.append(name)
        if len(split_name) < 5 or split_name[0] != 'ansible_collections' or split_name[3] != 'plugins' or split_name[4] != 'module_utils':
            raise ValueError('must search for something beneath a collection module_utils, not {0}.{1}'.format(to_native(pkg), to_native(name)))

        # NB: we can't use pkgutil.get_data safely here, since we don't want to import/execute package/module code on
        # the controller while analyzing/assembling the module, so we'll have to manually import the collection's
        # Python package to locate it (import root collection, reassemble resource path beneath, fetch source)

        # FIXME: handle MU redirection logic here

        collection_pkg_name = '.'.join(split_name[0:3])
        resource_base_path = os.path.join(*split_name[3:])
        # look for package_dir first, then module

        self._src = pkgutil.get_data(collection_pkg_name, to_native(os.path.join(resource_base_path, '__init__.py')))

        if self._src is not None:  # empty string is OK
            return

        self._src = pkgutil.get_data(collection_pkg_name, to_native(resource_base_path + '.py'))

        if not self._src:
            raise ImportError('unable to load collection-hosted module_util'
                              ' {0}.{1}'.format(to_native(pkg), to_native(name)))

    def get_source(self):
        return self._src


class InternalRedirectModuleInfo(ModuleInfo):
    def __init__(self, name, full_name):
        self.pkg_dir = None
        self._original_name = full_name
        self.path = full_name.replace('.', '/') + '.py'
        collection_meta = _get_collection_metadata('ansible.builtin')
        redirect = collection_meta.get('plugin_routing', {}).get('module_utils', {}).get(name, {}).get('redirect', None)
        if not redirect:
            raise ImportError('no redirect found for {0}'.format(name))
        self._redirect = redirect
        self.py_src = True
        self._shim_src = """
import sys
import {1} as mod

sys.modules['{0}'] = mod
""".format(self._original_name, self._redirect)

    def get_source(self):
        return self._shim_src


def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, zf):
    """
    Using ModuleDepFinder, make sure we have all of the module_utils files that
    the module and its module_utils files need.
    :arg name: Name of the python module we're examining
    :arg module_fqn: Fully qualified name of the python module we're scanning
    :arg py_module_names: set of the fully qualified module names (represented as a tuple of their
        FQN with __init__ appended if the module is also a python package).  Presence of a FQN in
        this set means that we've already examined it for module_util deps.
    :arg py_module_cache: map python module names (represented as a tuple of their FQN with __init__
        appended if the module is also a python package) to a tuple of the code in the module and
        the pathname the module would have inside of a Python toplevel (like site-packages)
    :arg zf: An open :python:class:`zipfile.ZipFile` object that holds the Ansible module payload
        which we're assembling
    """
    # Parse the module and find the imports of ansible.module_utils
    try:
        tree = ast.parse(data)
    except (SyntaxError, IndentationError) as e:
        raise AnsibleError("Unable to import %s due to %s" % (name, e.msg))

    finder = ModuleDepFinder(module_fqn)
    finder.visit(tree)

    #
    # Determine which of the imports we've found are modules (vs. class, function,
    # or variable names) for packages
    #
    module_utils_paths = [p for p in module_utils_loader._get_paths(subdirs=False) if os.path.isdir(p)]
    # FIXME: Do we still need this?  It feels like module_utils_loader should include
    # _MODULE_UTILS_PATH
    module_utils_paths.append(_MODULE_UTILS_PATH)

    normalized_modules = set()
    # Loop through the imports that we've found to normalize them
    # Exclude paths that match with paths we've already processed
    # (Have to exclude them a second time once the paths are processed)

    for py_module_name in finder.submodules.difference(py_module_names):
        module_info = None

        if py_module_name[0:3] == ('ansible', 'module_utils', 'six'):
            # Special case the python six library because it messes with the
            # import process in an incompatible way
            module_info = ModuleInfo('six', module_utils_paths)
            py_module_name = ('ansible', 'module_utils', 'six')
            idx = 0
        elif py_module_name[0:3] == ('ansible', 'module_utils', '_six'):
            # Special case the python six library because it messes with the
            # import process in an incompatible way
            module_info = ModuleInfo('_six', [os.path.join(p, 'six') for p in module_utils_paths])
            py_module_name = ('ansible', 'module_utils', 'six', '_six')
            idx = 0
        elif py_module_name[0] == 'ansible_collections':
            # FIXME (nitz): replicate module name resolution like below for granular imports
            for idx in (1, 2):
                if len(py_module_name) < idx:
                    break
                try:
                    # this is a collection-hosted MU; look it up with pkgutil.get_data()
                    module_info = CollectionModuleInfo(py_module_name[-idx], '.'.join(py_module_name[:-idx]))
                    break
                except ImportError:
                    continue
        elif py_module_name[0:2] == ('ansible', 'module_utils'):
            # Need to remove ansible.module_utils because PluginLoader may find different paths
            # for us to look in
            relative_module_utils_dir = py_module_name[2:]
            # Check whether either the last or the second to last identifier is
            # a module name
            for idx in (1, 2):
                if len(relative_module_utils_dir) < idx:
                    break
                try:
                    module_info = ModuleInfo(py_module_name[-idx],
                                             [os.path.join(p, *relative_module_utils_dir[:-idx]) for p in module_utils_paths])
                    break
                except ImportError:
                    # check metadata for redirect, generate stub if present
                    try:
                        module_info = InternalRedirectModuleInfo(py_module_name[-idx],
                                                                 '.'.join(py_module_name[:(None if idx == 1 else -1)]))
                        break
                    except ImportError:
                        continue
        else:
            # If we get here, it's because of a bug in ModuleDepFinder.  If we get a reproducer we
            # should then fix ModuleDepFinder
            display.warning('ModuleDepFinder improperly found a non-module_utils import %s'
                            % [py_module_name])
            continue

        # Could not find the module.  Construct a helpful error message.
        if module_info is None:
            msg = ['Could not find imported module support code for %s.  Looked for' % (name,)]
            if idx == 2:
                msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
            else:
                msg.append(py_module_name[-1])
            raise AnsibleError(' '.join(msg))

        if isinstance(module_info, CollectionModuleInfo):
            if idx == 2:
                # We've determined that the last portion was an identifier and
                # thus, not part of the module name
                py_module_name = py_module_name[:-1]

            # HACK: maybe surface collection dirs in here and use existing find_module code?
            normalized_name = py_module_name
            normalized_data = module_info.get_source()
            normalized_path = os.path.join(*py_module_name)
            py_module_cache[normalized_name] = (normalized_data, normalized_path)
            normalized_modules.add(normalized_name)

            # HACK: walk back up the package hierarchy to pick up package inits; this won't do the right thing
            # for actual packages yet...
            accumulated_pkg_name = []
            for pkg in py_module_name[:-1]:
                accumulated_pkg_name.append(pkg)  # we're accumulating this across iterations
                normalized_name = tuple(accumulated_pkg_name[:] + ['__init__'])  # extra machinations to get a hashable type (list is not)
                if normalized_name not in py_module_cache:
                    normalized_path = os.path.join(*accumulated_pkg_name)
                    # HACK: possibly preserve some of the actual package file contents; problematic for extend_paths and others though?
                    normalized_data = ''
                    py_module_cache[normalized_name] = (normalized_data, normalized_path)
                    normalized_modules.add(normalized_name)

        else:
            # Found a byte compiled file rather than source.  We cannot send byte
            # compiled over the wire as the python version might be different.
            # imp.find_module seems to prefer to return source packages so we just
            # error out if imp.find_module returns byte compiled files (This is
            # fragile as it depends on undocumented imp.find_module behaviour)
            if not module_info.pkg_dir and not module_info.py_src:
                msg = ['Could not find python source for imported module support code for %s.  Looked for' % name]
                if idx == 2:
                    msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
                else:
                    msg.append(py_module_name[-1])
                raise AnsibleError(' '.join(msg))

            if idx == 2:
                # We've determined that the last portion was an identifier and
                # thus, not part of the module name
                py_module_name = py_module_name[:-1]

            # If not already processed then we've got work to do
            # If not in the cache, then read the file into the cache
            # We already have a file handle for the module open so it makes
            # sense to read it now
            if py_module_name not in py_module_cache:
                if module_info.pkg_dir:
                    # Read the __init__.py instead of the module file as this is
                    # a python package
                    normalized_name = py_module_name + ('__init__',)
                    if normalized_name not in py_module_names:
                        normalized_data = module_info.get_source()
                        py_module_cache[normalized_name] = (normalized_data, module_info.path)
                        normalized_modules.add(normalized_name)
                else:
                    normalized_name = py_module_name
                    if normalized_name not in py_module_names:
                        normalized_data = module_info.get_source()
                        py_module_cache[normalized_name] = (normalized_data, module_info.path)
                        normalized_modules.add(normalized_name)

                #
                # Make sure that all the packages that this module is a part of
                # are also added
                #
                for i in range(1, len(py_module_name)):
                    py_pkg_name = py_module_name[:-i] + ('__init__',)
                    if py_pkg_name not in py_module_names:
                        # Need to remove ansible.module_utils because PluginLoader may find
                        # different paths for us to look in
                        relative_module_utils = py_pkg_name[2:]
                        pkg_dir_info = ModuleInfo(relative_module_utils[-1],
                                                  [os.path.join(p, *relative_module_utils[:-1]) for p in module_utils_paths])
                        normalized_modules.add(py_pkg_name)
                        py_module_cache[py_pkg_name] = (pkg_dir_info.get_source(), pkg_dir_info.path)

    # FIXME: Currently the AnsiBallZ wrapper monkeypatches module args into a global
    # variable in basic.py.  If a module doesn't import basic.py, then the AnsiBallZ wrapper will
    # traceback when it tries to monkeypatch.  So, for now, we have to unconditionally include
    # basic.py.
    #
    # In the future we need to change the wrapper to monkeypatch the args into a global variable in
    # their own, separate python module.  That way we won't require basic.py.  Modules which don't
    # want basic.py can import that instead.  AnsibleModule will need to change to import the vars
    # from the separate python module and mirror the args into its global variable for backwards
    # compatibility.
    if ('ansible', 'module_utils', 'basic',) not in py_module_names:
        pkg_dir_info = ModuleInfo('basic', module_utils_paths)
        normalized_modules.add(('ansible', 'module_utils', 'basic',))
        py_module_cache[('ansible', 'module_utils', 'basic',)] = (pkg_dir_info.get_source(), pkg_dir_info.path)
    # End of AnsiballZ hack

    #
    # iterate through all of the ansible.module_utils* imports that we haven't
    # already checked for new imports
    #

    # set of modules that we haven't added to the zipfile
    unprocessed_py_module_names = normalized_modules.difference(py_module_names)

    for py_module_name in unprocessed_py_module_names:

        py_module_path = os.path.join(*py_module_name)
        py_module_file_name = '%s.py' % py_module_path

        zf.writestr(py_module_file_name, py_module_cache[py_module_name][0])
        display.vvvvv("Using module_utils file %s" % py_module_cache[py_module_name][1])

    # Add the names of the files we're scheduling to examine in the loop to
    # py_module_names so that we don't re-examine them in the next pass
    # through recursive_finder()
    py_module_names.update(unprocessed_py_module_names)

    for py_module_file in unprocessed_py_module_names:
        next_fqn = '.'.join(py_module_file)
        recursive_finder(py_module_file[-1], next_fqn, py_module_cache[py_module_file][0],
                         py_module_names, py_module_cache, zf)
        # Save memory; the file won't have to be read again for this ansible module.
        del py_module_cache[py_module_file]


def _is_binary(b_module_data):
    textchars = bytearray(set([7, 8, 9, 10, 12, 13, 27]) | set(range(0x20, 0x100)) - set([0x7f]))
    start = b_module_data[:1024]
    return bool(start.translate(None, textchars))
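
# Example (illustrative): _is_binary(b'#!/usr/bin/python\nprint("hi")\n') is False, while
# _is_binary(b'\x7fELF\x02\x01\x01\x00') is True; only the first 1024 bytes are inspected.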


def _get_ansible_module_fqn(module_path):
    """
    Get the fully qualified name for an ansible module based on its pathname

    remote_module_fqn is the fully qualified name.  Like ansible.modules.system.ping
    Or ansible_collections.Namespace.Collection_name.plugins.modules.ping
    .. warning:: This function is for ansible modules only.  It won't work for other things
        (non-module plugins, etc)
    """
    remote_module_fqn = None

    # Is this a core module?
    match = CORE_LIBRARY_PATH_RE.search(module_path)
    if not match:
        # Is this a module in a collection?
        match = COLLECTION_PATH_RE.search(module_path)

    # We can tell the FQN for core modules and collection modules
    if match:
        path = match.group('path')
        if '.' in path:
            # FQNs must be valid as python identifiers.  This sanity check has failed.
            # we could check other things as well
            raise ValueError('Module name (or path) was not a valid python identifier')

        remote_module_fqn = '.'.join(path.split('/'))
    else:
        # Currently we do not handle modules in roles so we can end up here for that reason
        raise ValueError("Unable to determine module's fully qualified name")

    return remote_module_fqn
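
# Example (illustrative, with placeholder paths): a core module at
#   <site-packages>/ansible/modules/system/ping.py
# maps to 'ansible.modules.system.ping', and a collection module at
#   .../ansible_collections/my_ns/my_col/plugins/modules/ping.py
# maps to 'ansible_collections.my_ns.my_col.plugins.modules.ping'.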


def _add_module_to_zip(zf, remote_module_fqn, b_module_data):
    """Add a module from ansible or from an ansible collection into the module zip"""
    module_path_parts = remote_module_fqn.split('.')

    # Write the module
    module_path = '/'.join(module_path_parts) + '.py'
    zf.writestr(module_path, b_module_data)

    # Write the __init__.py's necessary to get there
    if module_path_parts[0] == 'ansible':
        # The ansible namespace is set up as part of the module_utils setup...
        start = 2
        existing_paths = frozenset()
    else:
        # ... but ansible_collections and other toplevels are not
        start = 1
        existing_paths = frozenset(zf.namelist())

    for idx in range(start, len(module_path_parts)):
        package_path = '/'.join(module_path_parts[:idx]) + '/__init__.py'
        # If a collections module uses module_utils from a collection then most packages will have already been added by recursive_finder.
        if package_path in existing_paths:
            continue
        # Note: We don't want to include more than one ansible module in a payload at this time
        # so no need to fill the __init__.py with namespace code
        zf.writestr(package_path, b'')


def _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, templar, module_compression, async_timeout, become,
                       become_method, become_user, become_password, become_flags, environment):
    """
    Given the source of the module, convert it to a Jinja2 template to insert
    module code and return whether it's a new or old style module.
    """
    module_substyle = module_style = 'old'

    # module_style is something important to calling code (ActionBase).  It
    # determines how arguments are formatted (json vs k=v) and whether
    # a separate arguments file needs to be sent over the wire.
    # module_substyle is extra information that's useful internally.  It tells
    # us what we have to look for to substitute in the module files and whether
    # we're using module replacer or ansiballz to format the module itself.
    if _is_binary(b_module_data):
        module_substyle = module_style = 'binary'
    elif REPLACER in b_module_data:
        # Do REPLACER before from ansible.module_utils because we need to make sure
        # we substitute "from ansible.module_utils.basic import *" for REPLACER
        module_style = 'new'
        module_substyle = 'python'
        b_module_data = b_module_data.replace(REPLACER, b'from ansible.module_utils.basic import *')
    elif NEW_STYLE_PYTHON_MODULE_RE.search(b_module_data):
        module_style = 'new'
        module_substyle = 'python'
    elif REPLACER_WINDOWS in b_module_data:
        module_style = 'new'
        module_substyle = 'powershell'
        b_module_data = b_module_data.replace(REPLACER_WINDOWS, b'#Requires -Module Ansible.ModuleUtils.Legacy')
    elif re.search(b'#Requires -Module', b_module_data, re.IGNORECASE) \
            or re.search(b'#Requires -Version', b_module_data, re.IGNORECASE)\
            or re.search(b'#AnsibleRequires -OSVersion', b_module_data, re.IGNORECASE) \
            or re.search(b'#AnsibleRequires -Powershell', b_module_data, re.IGNORECASE) \
            or re.search(b'#AnsibleRequires -CSharpUtil', b_module_data, re.IGNORECASE):
        module_style = 'new'
        module_substyle = 'powershell'
    elif REPLACER_JSONARGS in b_module_data:
        module_style = 'new'
        module_substyle = 'jsonargs'
    elif b'WANT_JSON' in b_module_data:
        module_substyle = module_style = 'non_native_want_json'

    shebang = None
    # Neither old-style, non_native_want_json nor binary modules should be modified
    # except for the shebang line (Done by modify_module)
    if module_style in ('old', 'non_native_want_json', 'binary'):
        return b_module_data, module_style, shebang

    output = BytesIO()
    py_module_names = set()

    try:
        remote_module_fqn = _get_ansible_module_fqn(module_path)
    except ValueError:
        # Modules in roles currently are not found by the fqn heuristic so we
        # fall back to this.  This means that relative imports inside a module from
        # a role may fail.  Absolute imports should be used for future-proofing.
        # People should start writing collections instead of modules in roles so we
        # may never fix this
        display.debug('ANSIBALLZ: Could not determine module FQN')
        remote_module_fqn = 'ansible.modules.%s' % module_name

    if module_substyle == 'python':
        params = dict(ANSIBLE_MODULE_ARGS=module_args,)
        try:
            python_repred_params = repr(json.dumps(params))
        except TypeError as e:
            raise AnsibleError("Unable to pass options to module, they must be JSON serializable: %s" % to_native(e))

        try:
            compression_method = getattr(zipfile, module_compression)
        except AttributeError:
            display.warning(u'Bad module compression string specified: %s.  Using ZIP_STORED (no compression)' % module_compression)
            compression_method = zipfile.ZIP_STORED

        lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache')
        cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression))

        zipdata = None
        # Optimization -- don't lock if the module has already been cached
        if os.path.exists(cached_module_filename):
            display.debug('ANSIBALLZ: using cached module: %s' % cached_module_filename)
            with open(cached_module_filename, 'rb') as module_data:
                zipdata = module_data.read()
        else:
            if module_name in action_write_locks.action_write_locks:
                display.debug('ANSIBALLZ: Using lock for %s' % module_name)
                lock = action_write_locks.action_write_locks[module_name]
            else:
                # If the action plugin directly invokes the module (instead of
                # going through a strategy) then we don't have a cross-process
                # Lock specifically for this module.  Use the "unexpected
                # module" lock instead
                display.debug('ANSIBALLZ: Using generic lock for %s' % module_name)
                lock = action_write_locks.action_write_locks[None]

            display.debug('ANSIBALLZ: Acquiring lock')
            with lock:
                display.debug('ANSIBALLZ: Lock acquired: %s' % id(lock))
                # Check that no other process has created this while we were
                # waiting for the lock
                if not os.path.exists(cached_module_filename):
                    display.debug('ANSIBALLZ: Creating module')
                    # Create the module zip data
                    zipoutput = BytesIO()
                    zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)

                    # py_module_cache maps python module names to a tuple of the code in the module
                    # and the pathname to the module.  See the recursive_finder() documentation for
                    # more info.
                    # Here we pre-load it with modules which we create without bothering to
                    # read from actual files (In some cases, these need to differ from what ansible
                    # ships because they're namespace packages in the module)
                    py_module_cache = {
                        ('ansible', '__init__',): (
                            b'from pkgutil import extend_path\n'
                            b'__path__=extend_path(__path__,__name__)\n'
                            b'__version__="' + to_bytes(__version__) +
                            b'"\n__author__="' + to_bytes(__author__) + b'"\n',
                            'ansible/__init__.py'),
                        ('ansible', 'module_utils', '__init__',): (
                            b'from pkgutil import extend_path\n'
                            b'__path__=extend_path(__path__,__name__)\n',
                            'ansible/module_utils/__init__.py')}

                    for (py_module_name, (file_data, filename)) in py_module_cache.items():
                        zf.writestr(filename, file_data)
                        # py_module_names keeps track of which modules we've already scanned for
                        # module_util dependencies
                        py_module_names.add(py_module_name)

                    # Returning the ast tree is a temporary hack.  We need to know if the module has
                    # a main() function or not as we are deprecating new-style modules without
                    # main().  Because parsing the ast is expensive, return it from recursive_finder
                    # instead of reparsing.  Once the deprecation is over and we remove that code,
                    # also remove returning of the ast tree.
                    recursive_finder(module_name, remote_module_fqn, b_module_data, py_module_names,
                                     py_module_cache, zf)

                    display.debug('ANSIBALLZ: Writing module into payload')
                    _add_module_to_zip(zf, remote_module_fqn, b_module_data)

                    zf.close()
                    zipdata = base64.b64encode(zipoutput.getvalue())

                    # Write the assembled module to a temp file (write to temp
                    # so that no one looking for the file reads a partially
                    # written file)
                    if not os.path.exists(lookup_path):
                        # Note -- if we had a global setup function, that would
                        # be a better place to run this.
                        os.makedirs(lookup_path)
                    display.debug('ANSIBALLZ: Writing module')
                    with open(cached_module_filename + '-part', 'wb') as f:
                        f.write(zipdata)

                    # Rename the file into its final position in the cache so
                    # future users of this module can read it off the
                    # filesystem instead of constructing from scratch.
                    display.debug('ANSIBALLZ: Renaming module')
                    os.rename(cached_module_filename + '-part', cached_module_filename)
                    display.debug('ANSIBALLZ: Done creating module')

            if zipdata is None:
                display.debug('ANSIBALLZ: Reading module after lock')
                # Another process wrote the file while we were waiting for
                # the write lock.  Go ahead and read the data from disk
                # instead of re-creating it.
                try:
                    with open(cached_module_filename, 'rb') as f:
                        zipdata = f.read()
                except IOError:
                    raise AnsibleError('A different worker process failed to create the module file. '
                                       'Look at the traceback for that process for debugging information.')
        zipdata = to_text(zipdata, errors='surrogate_or_strict')

        shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars, templar)
        if shebang is None:
            shebang = u'#!/usr/bin/python'

        # FUTURE: the module cache entry should be invalidated if we got this value from a host-dependent source
        rlimit_nofile = C.config.get_config_value('PYTHON_MODULE_RLIMIT_NOFILE', variables=task_vars)

        if not isinstance(rlimit_nofile, int):
            rlimit_nofile = int(templar.template(rlimit_nofile))

        if rlimit_nofile:
            rlimit = ANSIBALLZ_RLIMIT_TEMPLATE % dict(
                rlimit_nofile=rlimit_nofile,
            )
        else:
            rlimit = ''

        coverage_config = os.environ.get('_ANSIBLE_COVERAGE_CONFIG')

        if coverage_config:
            coverage_output = os.environ['_ANSIBLE_COVERAGE_OUTPUT']

            if coverage_output:
                # Enable code coverage analysis of the module.
                # This feature is for internal testing and may change without notice.
                coverage = ANSIBALLZ_COVERAGE_TEMPLATE % dict(
                    coverage_config=coverage_config,
                    coverage_output=coverage_output,
                )
            else:
                # Verify coverage is available without importing it.
                # This will detect when a module would fail with coverage enabled with minimal overhead.
                coverage = ANSIBALLZ_COVERAGE_CHECK_TEMPLATE
        else:
            coverage = ''

        now = datetime.datetime.utcnow()
        output.write(to_bytes(ACTIVE_ANSIBALLZ_TEMPLATE % dict(
            zipdata=zipdata,
            ansible_module=module_name,
            module_fqn=remote_module_fqn,
            params=python_repred_params,
            shebang=shebang,
            coding=ENCODING_STRING,
            year=now.year,
            month=now.month,
            day=now.day,
            hour=now.hour,
            minute=now.minute,
            second=now.second,
            coverage=coverage,
            rlimit=rlimit,
        )))
        b_module_data = output.getvalue()

    elif module_substyle == 'powershell':
        # Powershell/winrm don't actually make use of a shebang, so we can
        # safely set this here.  If we let the fallback code handle it,
        # it can fail in the presence of the UTF-8 BOM commonly added by
        # Windows text editors.
        shebang = u'#!powershell'
        # create the common exec wrapper payload and set that as the module_data
        # bytes
        b_module_data = ps_manifest._create_powershell_wrapper(
            b_module_data, module_path, module_args, environment,
            async_timeout, become, become_method, become_user, become_password,
            become_flags, module_substyle, task_vars, remote_module_fqn
        )

    elif module_substyle == 'jsonargs':
        module_args_json = to_bytes(json.dumps(module_args))

        # These strings could be included in a third-party module, but
        # officially they were part of the 'basic' snippet for new-style
        # python modules (which has since been replaced in ansiballz).  If we
        # remove them from the jsonargs-style module replacer then we can
        # remove them everywhere.
        python_repred_args = to_bytes(repr(module_args_json))
        b_module_data = b_module_data.replace(REPLACER_VERSION, to_bytes(repr(__version__)))
        b_module_data = b_module_data.replace(REPLACER_COMPLEX, python_repred_args)
        b_module_data = b_module_data.replace(REPLACER_SELINUX, to_bytes(','.join(C.DEFAULT_SELINUX_SPECIAL_FS)))

        # The main event -- substitute the JSON args string into the module
        b_module_data = b_module_data.replace(REPLACER_JSONARGS, module_args_json)
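        # Illustrative sketch (comment only): a jsonargs-style module embeds the
        # REPLACER_JSONARGS token in its source, commonly written as
        #   json_arguments = """<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"""
        # (token value shown as commonly documented; the authoritative value is the
        # REPLACER_JSONARGS constant defined above).  The replace() above turns that
        # token into the literal JSON-encoded module args, e.g. '{"state": "present"}'
        # (example args assumed).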

        facility = b'syslog.' + to_bytes(task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY), errors='surrogate_or_strict')
        b_module_data = b_module_data.replace(b'syslog.LOG_USER', facility)

    return (b_module_data, module_style, shebang)
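
# Illustrative usage sketch for modify_module() below (comment only, not
# executed).  The module name, path, and args are assumptions for illustration;
# in practice this is called from an action plugin, which supplies its own
# templar and task_vars.
#
#   (b_module_data, module_style, shebang) = modify_module(
#       'ping', '/path/to/ping.py', {'data': 'pong'}, templar,
#       task_vars=task_vars, module_compression='ZIP_DEFLATED')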


def modify_module(module_name, module_path, module_args, templar, task_vars=None, module_compression='ZIP_STORED', async_timeout=0, become=False,
                  become_method=None, become_user=None, become_password=None, become_flags=None, environment=None):
    """
    Used to insert chunks of code into modules before transfer rather than
    doing regular python imports.  This allows for more efficient transfer in
    a non-bootstrapping scenario by not moving extra files over the wire and
    also takes care of embedding arguments in the transferred modules.

    This version is done in such a way that local imports can still be
    used in the module code, so IDEs don't have to be aware of what is going on.

    Example:

        from ansible.module_utils.basic import *

    ... will result in the insertion of basic.py into the module
    from the module_utils/ directory in the source tree.

    For powershell, this code is effectively a no-op, as the exec wrapper
    requires access to a number of properties not available here.
    """
    task_vars = {} if task_vars is None else task_vars
    environment = {} if environment is None else environment

    with open(module_path, 'rb') as f:

        # read in the module source
        b_module_data = f.read()

    (b_module_data, module_style, shebang) = _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, templar, module_compression,
                                                                async_timeout=async_timeout, become=become, become_method=become_method,
                                                                become_user=become_user, become_password=become_password, become_flags=become_flags,
                                                                environment=environment)

    if module_style == 'binary':
        return (b_module_data, module_style, to_text(shebang, nonstring='passthru'))
    elif shebang is None:
        b_lines = b_module_data.split(b"\n", 1)
        if b_lines[0].startswith(b"#!"):
            b_shebang = b_lines[0].strip()
            # shlex.split on python-2.6 needs bytes.  On python-3.x it needs text
            args = shlex.split(to_native(b_shebang[2:], errors='surrogate_or_strict'))

            # _get_shebang() takes text strings
            args = [to_text(a, errors='surrogate_or_strict') for a in args]
            interpreter = args[0]
            b_new_shebang = to_bytes(_get_shebang(interpreter, task_vars, templar, args[1:])[0],
                                     errors='surrogate_or_strict', nonstring='passthru')

            if b_new_shebang:
                b_lines[0] = b_shebang = b_new_shebang

            if os.path.basename(interpreter).startswith(u'python'):
                b_lines.insert(1, b_ENCODING_STRING)

            shebang = to_text(b_shebang, nonstring='passthru', errors='surrogate_or_strict')
        else:
            # No shebang, assume a binary module?
            pass

        b_module_data = b"\n".join(b_lines)

    return (b_module_data, module_style, shebang)


def get_action_args_with_defaults(action, args, defaults, templar):
    """
    Merge configured module defaults into the args supplied for an action.

    Group defaults are applied first, then action-specific defaults, and
    finally the args passed directly to the task, which always win.
    """

    tmp_args = {}
    module_defaults = {}

    # Merge the defaults into a single dict; they arrive as a list of dicts
    # and later entries win
    if isinstance(defaults, list):
        for default in defaults:
            module_defaults.update(default)

    # if we actually have defaults, template and merge them
    if module_defaults:
        module_defaults = templar.template(module_defaults)

        # deal with configured group defaults first
        if action in C.config.module_defaults_groups:
            for group in C.config.module_defaults_groups.get(action, []):
                tmp_args.update((module_defaults.get('group/{0}'.format(group)) or {}).copy())

        # handle specific action defaults
        if action in module_defaults:
            tmp_args.update(module_defaults[action].copy())

    # direct args override all
    tmp_args.update(args)

    return tmp_args
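
# Illustrative sketch of the precedence implemented above (comment only; the
# values are assumptions for illustration).  Given defaults such as
#   [{'group/acme': {'host': 'a'}}, {'ping': {'data': 'pong', 'host': 'b'}}]
# and direct args {'host': 'c'} for the 'ping' action (assuming 'ping' belongs
# to a configured 'acme' defaults group), the merge yields
#   {'data': 'pong', 'host': 'c'}
# i.e. group defaults first, then action-specific defaults, then direct args.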