
Merge pull request #4072 from rambleraptor/gcp_env_vars

adding additional environment variables for gcp_compute + gcp modules

Reviewed-by: https://github.com/softwarefactory-project-zuul[bot]
commit 9eb7042d8c
Author: softwarefactory-project-zuul[bot]
Date: 2019-07-17 20:11:17 +00:00 (committed by GitHub)
27 changed files with 101 additions and 81 deletions

View File

@@ -39,6 +39,14 @@ def gce(cred, env, private_data_dir):
     f.close()
     os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
     env['GCE_CREDENTIALS_FILE_PATH'] = path
+    env['GCP_SERVICE_ACCOUNT_FILE'] = path
+
+    # Handle env variables for new module types.
+    # This includes gcp_compute inventory plugin and
+    # all new gcp_* modules.
+    env['GCP_AUTH_KIND'] = 'serviceaccount'
+    env['GCP_PROJECT'] = project
+    env['GCP_ENV_TYPE'] = 'tower'
     return path
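
The injector change above wires the same credential file into both the legacy gce.py inventory script and the newer gcp_compute plugin / gcp_* modules. A minimal standalone sketch of that pattern, using a hypothetical write_gcp_env helper (the real gce() injector also builds the service-account JSON from the credential and derives project from it):

import json
import os
import stat
import tempfile


def write_gcp_env(env, service_account_data, project):
    # Hypothetical helper mirroring the injector pattern above: write the
    # credential to an owner-only file, then point both the legacy GCE_*
    # variable and the newer GCP_* variables at it.
    handle, path = tempfile.mkstemp(suffix='.json')
    with os.fdopen(handle, 'w') as f:
        json.dump(service_account_data, f)
    os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)  # 0600: credential material
    env['GCE_CREDENTIALS_FILE_PATH'] = path  # legacy gce.py inventory script
    env['GCP_SERVICE_ACCOUNT_FILE'] = path   # gcp_compute plugin and gcp_* modules
    env['GCP_AUTH_KIND'] = 'serviceaccount'
    env['GCP_PROJECT'] = project
    env['GCP_ENV_TYPE'] = 'tower'
    return path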

View File

@@ -1,9 +1,9 @@
 {
-    "AZURE_SUBSCRIPTION_ID": "fooo",
-    "AZURE_CLIENT_ID": "fooo",
-    "AZURE_TENANT": "fooo",
-    "AZURE_SECRET": "fooo",
-    "AZURE_CLOUD_ENVIRONMENT": "fooo",
     "ANSIBLE_JINJA2_NATIVE": "True",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
+    "AZURE_CLIENT_ID": "fooo",
+    "AZURE_CLOUD_ENVIRONMENT": "fooo",
+    "AZURE_SECRET": "fooo",
+    "AZURE_SUBSCRIPTION_ID": "fooo",
+    "AZURE_TENANT": "fooo"
 }
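
The fixture updates in this and the following files are mechanical: every expected env.json is rewritten with its keys in alphabetical order, because create_reference_data (last file in this commit) now serializes with sort_keys=True. A quick illustration of the effect:

import json

env = {"AZURE_SUBSCRIPTION_ID": "fooo", "ANSIBLE_JINJA2_NATIVE": "True"}

# Old fixtures preserved insertion order:
print(json.dumps(env, indent=4))
# Regenerated fixtures are alphabetized:
print(json.dumps(env, indent=4, sort_keys=True))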

View File

@@ -1,7 +1,7 @@
 {
+    "ANSIBLE_JINJA2_NATIVE": "True",
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "AWS_ACCESS_KEY_ID": "fooo",
     "AWS_SECRET_ACCESS_KEY": "fooo",
-    "AWS_SECURITY_TOKEN": "fooo",
-    "ANSIBLE_JINJA2_NATIVE": "True",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "AWS_SECURITY_TOKEN": "fooo"
 }

View File

@@ -1,4 +1,8 @@
 {
     "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
-    "GCE_CREDENTIALS_FILE_PATH": "{{ file_reference }}"
+    "GCE_CREDENTIALS_FILE_PATH": "{{ file_reference }}",
+    "GCP_AUTH_KIND": "serviceaccount",
+    "GCP_ENV_TYPE": "tower",
+    "GCP_PROJECT": "fooo",
+    "GCP_SERVICE_ACCOUNT_FILE": "{{ file_reference }}"
 }

View File

@@ -1,6 +1,6 @@
 {
-    "FOREMAN_SERVER": "https://foo.invalid",
-    "FOREMAN_USER": "fooo",
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "FOREMAN_PASSWORD": "fooo",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "FOREMAN_SERVER": "https://foo.invalid",
+    "FOREMAN_USER": "fooo"
 }

View File

@@ -1,7 +1,7 @@
 {
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "TOWER_HOST": "https://foo.invalid",
-    "TOWER_USERNAME": "fooo",
     "TOWER_PASSWORD": "fooo",
-    "TOWER_VERIFY_SSL": "False",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "TOWER_USERNAME": "fooo",
+    "TOWER_VERIFY_SSL": "False"
 }

View File

@@ -1,9 +1,9 @@
 {
-    "AZURE_SUBSCRIPTION_ID": "fooo",
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "AZURE_CLIENT_ID": "fooo",
-    "AZURE_TENANT": "fooo",
-    "AZURE_SECRET": "fooo",
     "AZURE_CLOUD_ENVIRONMENT": "fooo",
     "AZURE_INI_PATH": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "AZURE_SECRET": "fooo",
+    "AZURE_SUBSCRIPTION_ID": "fooo",
+    "AZURE_TENANT": "fooo"
 }

View File

@@ -1,4 +1,4 @@
 {
-    "CLOUDFORMS_INI_PATH": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
+    "CLOUDFORMS_INI_PATH": "{{ file_reference }}"
 }

View File

@@ -1,7 +1,7 @@
 {
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "AWS_ACCESS_KEY_ID": "fooo",
     "AWS_SECRET_ACCESS_KEY": "fooo",
     "AWS_SECURITY_TOKEN": "fooo",
-    "EC2_INI_PATH": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "EC2_INI_PATH": "{{ file_reference }}"
 }

View File

@@ -1,8 +1,12 @@
 {
-    "GCE_EMAIL": "fooo",
-    "GCE_PROJECT": "fooo",
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "GCE_CREDENTIALS_FILE_PATH": "{{ file_reference }}",
+    "GCE_EMAIL": "fooo",
+    "GCE_INI_PATH": "{{ file_reference_0 }}",
+    "GCE_PROJECT": "fooo",
     "GCE_ZONE": "us-east4-a,us-west1-b",
-    "GCE_INI_PATH": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "GCP_AUTH_KIND": "serviceaccount",
+    "GCP_ENV_TYPE": "tower",
+    "GCP_PROJECT": "fooo",
+    "GCP_SERVICE_ACCOUNT_FILE": "{{ file_reference }}"
 }

View File

@@ -1,4 +1,4 @@
 {
-    "OS_CLIENT_CONFIG_FILE": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
+    "OS_CLIENT_CONFIG_FILE": "{{ file_reference }}"
 }

View File

@@ -1,7 +1,7 @@
 {
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "OVIRT_INI_PATH": "{{ file_reference }}",
-    "OVIRT_URL": "https://foo.invalid",
-    "OVIRT_USERNAME": "fooo",
     "OVIRT_PASSWORD": "fooo",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "OVIRT_URL": "https://foo.invalid",
+    "OVIRT_USERNAME": "fooo"
 }

View File

@@ -1,4 +1,4 @@
 {
-    "FOREMAN_INI_PATH": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
+    "FOREMAN_INI_PATH": "{{ file_reference }}"
 }

View File

@@ -1,9 +1,9 @@
 {
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "TOWER_HOST": "https://foo.invalid",
-    "TOWER_USERNAME": "fooo",
-    "TOWER_PASSWORD": "fooo",
-    "TOWER_VERIFY_SSL": "False",
     "TOWER_INVENTORY": "42",
     "TOWER_LICENSE_TYPE": "open",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "TOWER_PASSWORD": "fooo",
+    "TOWER_USERNAME": "fooo",
+    "TOWER_VERIFY_SSL": "False"
 }

View File

@@ -1,8 +1,8 @@
 {
-    "VMWARE_USER": "fooo",
-    "VMWARE_PASSWORD": "fooo",
+    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
     "VMWARE_HOST": "https://foo.invalid",
-    "VMWARE_VALIDATE_CERTS": "False",
     "VMWARE_INI_PATH": "{{ file_reference }}",
-    "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never"
+    "VMWARE_PASSWORD": "fooo",
+    "VMWARE_USER": "fooo",
+    "VMWARE_VALIDATE_CERTS": "False"
 }

View File

@@ -129,6 +129,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
     return a dictionary `content` with file contents, keyed off environment variable
     that references the file
     """
+    # build dict env as a mapping of environment variables to file names
     # Filter out environment variables which come from runtime environment
     env = {}
     exclude_keys = set(('PATH', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID'))
@@ -142,71 +143,74 @@ def read_content(private_data_dir, raw_env, inventory_update):
         env[k] = v
     inverse_env = {}
     for key, value in env.items():
-        inverse_env[value] = key
+        inverse_env.setdefault(value, []).append(key)
     cache_file_regex = re.compile(r'/tmp/awx_{0}_[a-zA-Z0-9_]+/{1}_cache[a-zA-Z0-9_]+'.format(
         inventory_update.id, inventory_update.source)
     )
     private_key_regex = re.compile(r'-----BEGIN ENCRYPTED PRIVATE KEY-----.*-----END ENCRYPTED PRIVATE KEY-----')
     # read directory content
+    # build a mapping of the file paths to aliases which will be constant across runs
     dir_contents = {}
-    references = {}
-    for filename in os.listdir(private_data_dir):
+    referenced_paths = set()
+    file_aliases = {}
+    filename_list = sorted(os.listdir(private_data_dir), key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
+    for filename in filename_list:
         if filename in ('args', 'project'):
             continue  # Ansible runner
         abs_file_path = os.path.join(private_data_dir, filename)
+        file_aliases[abs_file_path] = filename
         if abs_file_path in inverse_env:
-            env_key = inverse_env[abs_file_path]
-            references[abs_file_path] = env_key
-            env[env_key] = '{{ file_reference }}'
+            referenced_paths.add(abs_file_path)
+            alias = 'file_reference'
+            for i in range(10):
+                if alias not in file_aliases.values():
+                    break
+                alias = 'file_reference_{}'.format(i)
+            else:
+                raise RuntimeError('Test not able to cope with >10 references by env vars. '
+                                   'Something probably went very wrong.')
+            file_aliases[abs_file_path] = alias
+            for env_key in inverse_env[abs_file_path]:
+                env[env_key] = '{{{{ {} }}}}'.format(alias)
         try:
             with open(abs_file_path, 'r') as f:
                 dir_contents[abs_file_path] = f.read()
             # Declare a reference to inventory plugin file if it exists
             if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]:
-                references[abs_file_path] = filename  # plugin filenames are universal
+                referenced_paths.add(abs_file_path)  # used as inventory file
+            elif cache_file_regex.match(abs_file_path):
+                file_aliases[abs_file_path] = 'cache_file'
         except IsADirectoryError:
             dir_contents[abs_file_path] = '<directory>'
+            if cache_file_regex.match(abs_file_path):
+                file_aliases[abs_file_path] = 'cache_dir'
-    # Declare cross-file references, also use special keywords if it is the cache
-    cache_referenced = False
-    cache_present = False
+    # Substitute in aliases for cross-file references
     for abs_file_path, file_content in dir_contents.copy().items():
         if cache_file_regex.match(file_content):
-            cache_referenced = True
+            if 'cache_dir' not in file_aliases.values() and 'cache_file' not in file_aliases.values():
+                raise AssertionError(
+                    'A cache file was referenced but never created, files:\n{}'.format(
+                        json.dumps(dir_contents, indent=4)))
+        # if another file's path appears in this file, replace it with its alias
         for target_path in dir_contents.keys():
+            other_alias = file_aliases[target_path]
             if target_path in file_content:
-                if target_path in references:
-                    raise AssertionError(
-                        'File {} is referenced by env var or other file as well as file {}:\n{}\n{}'.format(
-                            target_path, abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4)))
-                else:
-                    if cache_file_regex.match(target_path):
-                        cache_present = True
-                        if os.path.isdir(target_path):
-                            keyword = 'cache_dir'
-                        else:
-                            keyword = 'cache_file'
-                        references[target_path] = keyword
-                        new_file_content = cache_file_regex.sub('{{ ' + keyword + ' }}', file_content)
-                    else:
-                        references[target_path] = 'file_reference'
-                        new_file_content = file_content.replace(target_path, '{{ file_reference }}')
-                    dir_contents[abs_file_path] = new_file_content
-    if cache_referenced and not cache_present:
-        raise AssertionError(
-            'A cache file was referenced but never created, files:\n{}'.format(
-                json.dumps(dir_contents, indent=4)))
+                referenced_paths.add(target_path)
+                dir_contents[abs_file_path] = file_content.replace(target_path, '{{ ' + other_alias + ' }}')
+    # build dict content which is the directory contents keyed off the file aliases
     content = {}
     for abs_file_path, file_content in dir_contents.items():
-        if abs_file_path not in references:
+        # assert that all files laid down are used
+        if abs_file_path not in referenced_paths:
             raise AssertionError(
                 "File {} is not referenced. References and files:\n{}\n{}".format(
-                    abs_file_path, json.dumps(references, indent=4), json.dumps(dir_contents, indent=4)))
-        reference_key = references[abs_file_path]
+                    abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4)))
         file_content = private_key_regex.sub('{{private_key}}', file_content)
-        content[reference_key] = file_content
+        content[file_aliases[abs_file_path]] = file_content
     return (env, content)
@@ -223,7 +227,7 @@ def create_reference_data(source_dir, env, content):
         f.write(content)
     if env:
         with open(os.path.join(source_dir, 'env.json'), 'w') as f:
-            f.write(json.dumps(env, indent=4))
+            json.dump(env, f, indent=4, sort_keys=True)


 @pytest.mark.django_db
@@ -283,7 +287,7 @@ def test_inventory_update_injected_content(this_kind, script_or_plugin, inventor
     for f_name in expected_file_list:
         with open(os.path.join(files_dir, f_name), 'r') as f:
             ref_content = f.read()
-        assert ref_content == content[f_name]
+        assert ref_content == content[f_name], f_name
     try:
         with open(os.path.join(source_dir, 'env.json'), 'r') as f:
             ref_env_text = f.read()
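
The heart of the test refactor above: inverse_env becomes a multimap so that several environment variables (for example GCE_CREDENTIALS_FILE_PATH and GCP_SERVICE_ACCOUNT_FILE) can point at one file, and each referenced file gets a stable alias (file_reference, file_reference_0, ...) used in both env.json and cross-file substitutions. A condensed, self-contained sketch of that aliasing logic, outside the test harness (not the verbatim test code):

import os


def build_aliases(env, private_data_dir):
    # Invert env so several variables can reference the same file path.
    inverse_env = {}
    for key, value in env.items():
        inverse_env.setdefault(value, []).append(key)

    file_aliases = {}
    for filename in sorted(os.listdir(private_data_dir)):
        abs_file_path = os.path.join(private_data_dir, filename)
        file_aliases[abs_file_path] = filename  # default alias: the filename
        if abs_file_path in inverse_env:
            # Allocate a stable alias: file_reference, file_reference_0, ...
            alias = 'file_reference'
            i = 0
            while alias in file_aliases.values():
                alias = 'file_reference_{}'.format(i)
                i += 1
            file_aliases[abs_file_path] = alias
            # Every env var pointing at this file gets the same placeholder.
            for env_key in inverse_env[abs_file_path]:
                env[env_key] = '{{{{ {} }}}}'.format(alias)
    return file_aliases

Because the aliases no longer depend on which single env var happened to win, the reference env.json files become deterministic across runs, which is what allows the fixtures in this commit to be regenerated and sorted.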