Skip to content
This repository has been archived by the owner on Feb 3, 2021. It is now read-only.

Commit

Permalink
Bug: fix loading local spark config (#282)
Browse files Browse the repository at this point in the history
  • Loading branch information
jafreck authored Dec 15, 2017
1 parent 46fd444 commit 1d9d9ca
Showing 1 changed file with 18 additions and 14 deletions.
32 changes: 18 additions & 14 deletions cli/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,34 +277,38 @@ def merge(self, cluster_id, username, job_ui_port, job_history_ui_port, web_ui_p


def load_aztk_spark_config():
# try load global
jars_src = os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'jars')
def get_file_if_exists(file, local: bool):
    """Return the full path of *file* in a config directory, or None.

    Args:
        file: File name to look up (e.g. ``'spark-defaults.conf'``).
        local: When True, search the local ``DEFAULT_SPARK_CONF_SOURCE``
            directory; otherwise search the global ``GLOBAL_CONFIG_PATH``.

    Returns:
        The joined path when the file exists in the chosen directory,
        otherwise None.
    """
    # Choose the search root once; the original duplicated the whole
    # branch body (and built the same os.path.join twice per branch).
    base = (aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE
            if local
            else aztk.utils.constants.GLOBAL_CONFIG_PATH)
    path = os.path.join(base, file)
    if os.path.exists(path):
        return path
    # Explicit None: the original fell off the end implicitly, hiding
    # the "not found" contract from readers and callers.
    return None

jars = spark_defaults_conf = spark_env_sh = core_site_xml = None

# try load global
try:
jars_src = os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'jars')
jars = [os.path.join(jars_src, jar) for jar in os.listdir(jars_src)]
if os.path.exists(os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'spark-defaults.conf')):
spark_defaults_conf = os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'spark-defaults.conf')
if os.path.exists(os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'spark-env.sh')):
spark_env_sh = os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'spark-env.sh')
if os.path.exists(os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'core-site.xml')):
core_site_xml = os.path.join(aztk.utils.constants.GLOBAL_CONFIG_PATH, 'core-site.xml')
except FileNotFoundError:
pass

spark_defaults_conf = get_file_if_exists('spark-defaults.conf', False)
spark_env_sh = get_file_if_exists('spark-env.sh', False)
core_site_xml = get_file_if_exists('core-site.xml', False)

# try load local, overwrite if found
try:
jars_src = os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'jars')
jars = [os.path.join(jars_src, jar) for jar in os.listdir(jars_src)]
if os.path.exists(os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'spark-defaults.conf')):
spark_defaults_conf = os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'spark-defaults.conf')
if os.path.exists(os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'spark-env.sh')):
spark_env_sh = os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'spark-env.sh')
if os.path.exists(os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'core-site.xml')):
core_site_xml = os.path.join(aztk.utils.constants.DEFAULT_SPARK_CONF_SOURCE, 'core-site.xml')
except FileNotFoundError:
pass

spark_defaults_conf = get_file_if_exists('spark-defaults.conf', True)
spark_env_sh = get_file_if_exists('spark-env.sh', True)
core_site_xml = get_file_if_exists('core-site.xml', True)

return aztk.spark.models.SparkConfiguration(
spark_defaults_conf=spark_defaults_conf,
jars=jars,
Expand Down

0 comments on commit 1d9d9ca

Please sign in to comment.