diff --git a/fink_mm/__init__.py b/fink_mm/__init__.py
index a9f9f426..338734d1 100644
--- a/fink_mm/__init__.py
+++ b/fink_mm/__init__.py
@@ -12,6 +12,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-__version__ = "0.16.11"
+__version__ = "0.16.12"
 __distribution_schema_version__ = "1.3"
 __observatory_schema_version__ = "1.1"
diff --git a/fink_mm/conftest.py b/fink_mm/conftest.py
index af65e817..13bb6ed2 100644
--- a/fink_mm/conftest.py
+++ b/fink_mm/conftest.py
@@ -19,6 +19,7 @@
 
 @pytest.fixture(autouse=True)
 def init_test(doctest_namespace):
+    doctest_namespace["os"] = os
     doctest_namespace["pd"] = pandas
     doctest_namespace["tempfile"] = tempfile
     doctest_namespace["assert_frame_equal"] = assert_frame_equal
diff --git a/fink_mm/utils/fun_utils.py b/fink_mm/utils/fun_utils.py
index 3271b8c5..6dd99302 100644
--- a/fink_mm/utils/fun_utils.py
+++ b/fink_mm/utils/fun_utils.py
@@ -860,8 +860,13 @@ def read_and_build_spark_submit(config, logger):
     --------
     >>> config = get_config({"--config" : "fink_mm/conf/fink_mm.conf"})
     >>> logger = init_logging()
-    >>> read_and_build_spark_submit(config, logger)
-    "if test -f '~/.bash_profile'; then source ~/.bash_profile; fi; `which spark-submit` --master local[2] --conf spark.mesos.principal= --conf spark.mesos.secret= --conf spark.mesos.role= --conf spark.executorEnv.HOME=/path/to/user/ --driver-memory 4G --executor-memory 8G --conf spark.cores.max=16 --conf spark.executor.cores=8"
+    >>> spark_str = read_and_build_spark_submit(config, logger)
+
+    >>> home_path = os.environ["HOME"]
+    >>> path_bash_profile = os.path.join(home_path, ".bash_profile")
+    >>> test_str = f"if test -f '{path_bash_profile}'; then source {path_bash_profile}; fi; `which spark-submit` --master local[2] --conf spark.mesos.principal= --conf spark.mesos.secret= --conf spark.mesos.role= --conf spark.executorEnv.HOME=/path/to/user/ --driver-memory 4G --executor-memory 8G --conf spark.cores.max=16 --conf spark.executor.cores=8"
+    >>> test_str == spark_str
+    True
     """
     try:
         master_manager = config["STREAM"]["manager"]
@@ -877,8 +882,11 @@ def read_and_build_spark_submit(config, logger):
         logger.error("Spark Admin config entry not found \n\t {}".format(e))
         exit(1)
 
-    spark_submit = "if test -f '~/.bash_profile'; then \
-        source ~/.bash_profile; fi; \
+    home_path = os.environ["HOME"]
+    path_bash_profile = os.path.join(home_path, ".bash_profile")
+
+    spark_submit = "if test -f '{}'; then \
+        source {}; fi; \
         `which spark-submit` \
         --master {} \
         --conf spark.mesos.principal={} \
@@ -889,6 +897,8 @@ def read_and_build_spark_submit(config, logger):
         --executor-memory {}G \
         --conf spark.cores.max={} \
         --conf spark.executor.cores={}".format(
+        path_bash_profile,
+        path_bash_profile,
         master_manager,
         principal_group,
         secret,
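
For reference, a minimal standalone sketch of the HOME-based path construction this patch relies on, assuming the `HOME` environment variable is set (the new doctest makes the same assumption); `command_prefix` and the `print` call are illustrative only and not part of the patched module:

```python
import os

# Resolve the user's .bash_profile from HOME instead of relying on the shell
# expanding "~" (assumes HOME is set in the environment).
home_path = os.environ["HOME"]
path_bash_profile = os.path.join(home_path, ".bash_profile")

# Source .bash_profile only if it exists, then locate spark-submit; the real
# command goes on to append the --master/--conf options read from the config.
command_prefix = "if test -f '{}'; then source {}; fi; `which spark-submit`".format(
    path_bash_profile,
    path_bash_profile,
)
print(command_prefix)
```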