From 53d63a46c8cf0dfda5fbb87d1572cd53302a21ba Mon Sep 17 00:00:00 2001 From: Krishnan R Date: Tue, 1 Aug 2017 22:29:02 +0530 Subject: [PATCH] Remove SWAN-specific files and drop the sparkmonitorhub image from the Travis deploy --- .travis.yml | 9 +- extension/sparkmonitor/serverextension.py | 6 +- extension/sparkmonitor/sparkmonitor.py | 5 +- notebooks/swantest.ipynb | 166 --------------------- swanimage.Dockerfile | 16 -- systemuser.sh | 172 ---------------------- 6 files changed, 13 insertions(+), 361 deletions(-) delete mode 100644 notebooks/swantest.ipynb delete mode 100644 swanimage.Dockerfile delete mode 100644 systemuser.sh diff --git a/.travis.yml b/.travis.yml index e1ad6174..9cb682e1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,9 +36,9 @@ after_success: - cd $TRAVIS_BUILD_DIR/ - 'docker login -u=$DOCKER_USER -p=$DOCKER_PASS && docker build -f Dockerfile -t krishnanr/sparkmonitor . && docker push krishnanr/sparkmonitor' -- 'docker build -f swanimage.Dockerfile -t krishnanr/sparkmonitorhub . && docker push krishnanr/sparkmonitorhub' + deploy: - + - provider: releases skip_cleanup: true file: @@ -54,7 +54,6 @@ deploy: on: tags: true script: 'docker tag krishnanr/sparkmonitor krishnanr/sparkmonitor:$TRAVIS_TAG && - docker push krishnanr/sparkmonitor:$TRAVIS_TAG && - docker tag krishnanr/sparkmonitorhub krishnanr/sparkmonitorhub:$TRAVIS_TAG && - docker push krishnanr/sparkmonitorhub:$TRAVIS_TAG' + docker push krishnanr/sparkmonitor:$TRAVIS_TAG' + \ No newline at end of file diff --git a/extension/sparkmonitor/serverextension.py b/extension/sparkmonitor/serverextension.py index f9d069b5..1ca6fccf 100644 --- a/extension/sparkmonitor/serverextension.py +++ b/extension/sparkmonitor/serverextension.py @@ -1,3 +1,7 @@ +# SparkMonitor Jupyter server extension. +# This module adds a request handler to the Jupyter web server. It proxies the Spark UI, assumed to be running at 127.0.0.1:4040, +# to the endpoint [notebook rooturl]/sparkmonitor. +# TODO: Create unique endpoints for different kernels or Spark applications. from notebook.base.handlers import IPythonHandler @@ -54,7 +58,7 @@ def handle_response(self, response): self.write(content) self.finish() - +# Called when the server extension is loaded. def load_jupyter_server_extension(nb_server_app): """ Called when the extension is loaded. 
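Note on the serverextension.py hunk above: the new header comment describes the proxy, but the handler body is elided in this excerpt. Below is a minimal Tornado-4-era sketch of that pattern; the handler name and route wiring are illustrative assumptions, and only the 4040 port and the /sparkmonitor endpoint come from the patch.

# Hedged sketch of the proxy described in the serverextension.py header comment.
# SparkMonitorProxyHandler and the route regex are illustrative assumptions.
from notebook.base.handlers import IPythonHandler
from notebook.utils import url_path_join
from tornado import httpclient, web

class SparkMonitorProxyHandler(IPythonHandler):
    @web.asynchronous  # Tornado 4 style: keep the connection open until finish()
    def get(self, path=""):
        # Forward the request to the Spark UI assumed to be at 127.0.0.1:4040.
        client = httpclient.AsyncHTTPClient()
        client.fetch("http://127.0.0.1:4040/" + path.lstrip("/"),
                     self.handle_response)

    def handle_response(self, response):
        # Relay the Spark UI's reply (or a gateway error) to the browser.
        if response.error:
            self.set_status(502)
        else:
            self.write(response.body)
        self.finish()

def load_jupyter_server_extension(nb_server_app):
    # Register the handler under [notebook rooturl]/sparkmonitor.
    web_app = nb_server_app.web_app
    route = url_path_join(web_app.settings["base_url"], "sparkmonitor(.*)")
    web_app.add_handlers(".*$", [(route, SparkMonitorProxyHandler)])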
diff --git a/extension/sparkmonitor/sparkmonitor.py b/extension/sparkmonitor/sparkmonitor.py index f7690b19..a05fb70c 100644 --- a/extension/sparkmonitor/sparkmonitor.py +++ b/extension/sparkmonitor/sparkmonitor.py @@ -111,7 +111,8 @@ def load_ipython_extension(ipython): logger = logging.getLogger('sparkscalamonitor') logger.setLevel(logging.DEBUG) logger.propagate = False - fh = logging.FileHandler('scalamonitorextension.log') # ,mode='w') + # For debugging this module: writes logs to a file. + fh = logging.FileHandler('sparkmonitor_kernelextension.log', mode='w') fh.setLevel(logging.DEBUG) formatter = logging.Formatter( '%(levelname)s: %(asctime)s - %(name)s - %(process)d - %(processName)s - \ @@ -138,7 +139,9 @@ def load_ipython_extension(ipython): # Injecting conf into users namespace if(spark_imported): conf = ipython.user_ns.get('conf') + if(conf): + logger.info("Conf: " + conf.toDebugString())  if(isinstance(conf, SparkConf)): configure(conf) else: diff --git a/notebooks/swantest.ipynb b/notebooks/swantest.ipynb deleted file mode 100644 index 8c186fcf..00000000 --- a/notebooks/swantest.ipynb +++ /dev/null @@ -1,166 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Testing SparkMonitor" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(conf.toDebugString())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os, sys\n", - "\n", - "print(\"Please enter your password\")\n", - "ret = os.system(\"echo \\\"%s\\\" | kinit\" % getpass.getpass())\n", - "\n", - "if ret == 0: print(\"Credentials created successfully\")\n", - "else: sys.stderr.write('Error creating credentials, return code: %s\\n' % ret)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "from pyspark import SparkContext\n", - "from pyspark.sql import SparkSession" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "conf.set('spark.driver.host', os.environ['SERVER_HOSTNAME'])\n", - "conf.set('spark.driver.port', os.environ['SPARK_PORT_1'])\n", - "conf.set('spark.fileserver.port', os.environ['SPARK_PORT_2'])\n", - "conf.set('spark.blockManager.port', os.environ['SPARK_PORT_3'])\n", - "conf.set('spark.ui.port', os.environ['SPARK_PORT_4'])\n", - "conf.set('spark.master', 'yarn')\n", - "#conf.set('spark.jars', 
'{lcgview}/lib/accsoft/accsoft-nxcals-data-access-0.1.4.jar,{lcgview}/lib/accsoft/dependency/accsoft-nxcals-common-0.1.4.jar,{lcgview}/lib/accsoft/dependency/accsoft-nxcals-service-client-0.1.4.jar,{lcgview}/lib/accsoft/dependency/activation-1.1.jar,{lcgview}/lib/accsoft/dependency/apacheds-i18n-2.0.0-M15.jar,{lcgview}/lib/accsoft/dependency/apacheds-kerberos-codec-2.0.0-M15.jar,{lcgview}/lib/accsoft/dependency/api-asn1-api-1.0.0-M20.jar,{lcgview}/lib/accsoft/dependency/api-util-1.0.0-M20.jar,{lcgview}/lib/accsoft/dependency/aspectjrt-1.8.10.jar,{lcgview}/lib/accsoft/dependency/aspectjweaver-1.8.10.jar,{lcgview}/lib/accsoft/dependency/avro-1.8.1.jar,{lcgview}/lib/accsoft/dependency/commons-beanutils-1.7.0.jar,{lcgview}/lib/accsoft/dependency/commons-beanutils-core-1.8.0.jar,{lcgview}/lib/accsoft/dependency/commons-cli-1.2.jar,{lcgview}/lib/accsoft/dependency/commons-codec-1.10.jar,{lcgview}/lib/accsoft/dependency/commons-collections-3.2.2.jar,{lcgview}/lib/accsoft/dependency/commons-compress-1.8.1.jar,{lcgview}/lib/accsoft/dependency/commons-configuration-1.6.jar,{lcgview}/lib/accsoft/dependency/commons-daemon-1.0.13.jar,{lcgview}/lib/accsoft/dependency/commons-digester-1.8.jar,{lcgview}/lib/accsoft/dependency/commons-el-1.0.jar,{lcgview}/lib/accsoft/dependency/commons-httpclient-3.1.jar,{lcgview}/lib/accsoft/dependency/commons-io-2.5.jar,{lcgview}/lib/accsoft/dependency/commons-lang-2.6.jar,{lcgview}/lib/accsoft/dependency/commons-lang3-3.5.jar,{lcgview}/lib/accsoft/dependency/commons-logging-1.2.jar,{lcgview}/lib/accsoft/dependency/commons-math-2.1.jar,{lcgview}/lib/accsoft/dependency/commons-math3-3.4.1.jar,{lcgview}/lib/accsoft/dependency/commons-net-2.2.jar,{lcgview}/lib/accsoft/dependency/commons-pool2-2.4.2.jar,{lcgview}/lib/accsoft/dependency/config-1.3.1.jar,{lcgview}/lib/accsoft/dependency/curator-client-2.7.1.jar,{lcgview}/lib/accsoft/dependency/curator-framework-2.7.1.jar,{lcgview}/lib/accsoft/dependency/curator-recipes-2.4.0.jar,{lcgview}/lib/accsoft/dependency/disruptor-3.3.0.jar,{lcgview}/lib/accsoft/dependency/findbugs-annotations-1.3.9-1.jar,{lcgview}/lib/accsoft/dependency/gson-2.2.4.jar,{lcgview}/lib/accsoft/dependency/guava-16.0.jar,{lcgview}/lib/accsoft/dependency/hadoop-annotations-2.6.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hadoop-auth-2.6.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hadoop-common-2.6.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hadoop-hdfs-2.6.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hadoop-hdfs-2.6.0-cdh5.7.5-tests.jar,{lcgview}/lib/accsoft/dependency/hamcrest-core-1.3.jar,{lcgview}/lib/accsoft/dependency/hbase-annotations-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-client-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-common-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-common-1.2.0-cdh5.7.5-tests.jar,{lcgview}/lib/accsoft/dependency/hbase-hadoop2-compat-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-hadoop-compat-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-prefix-tree-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-procedure-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-protocol-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/hbase-server-1.2.0-cdh5.7.5.jar,{lcgview}/lib/accsoft/dependency/high-scale-lib-1.1.1.jar,{lcgview}/lib/accsoft/dependency/hortonworks-shc-core-cern-1.0.3-2.1-s_2.11-CERN.jar,{lcgview}/lib/accsoft/dependency/htrace-core-3.2.0-incubating.jar,{lcgview}/lib/accsoft/dependency/htrace-core4-4.0.1-incubating.jar,{lcgview}/li
b/accsoft/dependency/httpclient-4.5.2.jar,{lcgview}/lib/accsoft/dependency/httpcore-4.4.4.jar,{lcgview}/lib/accsoft/dependency/jackson-annotations-2.6.0.jar,{lcgview}/lib/accsoft/dependency/jackson-core-2.6.5.jar,{lcgview}/lib/accsoft/dependency/jackson-core-asl-1.9.13.jar,{lcgview}/lib/accsoft/dependency/jackson-databind-2.6.5.jar,{lcgview}/lib/accsoft/dependency/jackson-jaxrs-1.8.8.jar,{lcgview}/lib/accsoft/dependency/jackson-mapper-asl-1.9.13.jar,{lcgview}/lib/accsoft/dependency/jackson-xc-1.8.3.jar,{lcgview}/lib/accsoft/dependency/jamon-runtime-2.4.1.jar,{lcgview}/lib/accsoft/dependency/jasper-compiler-5.5.23.jar,{lcgview}/lib/accsoft/dependency/jasper-runtime-5.5.23.jar,{lcgview}/lib/accsoft/dependency/javax.servlet-api-3.1.0.jar,{lcgview}/lib/accsoft/dependency/jaxb-api-2.2.2.jar,{lcgview}/lib/accsoft/dependency/jaxb-impl-2.2.3-1.jar,{lcgview}/lib/accsoft/dependency/jcodings-1.0.8.jar,{lcgview}/lib/accsoft/dependency/jersey-core-1.9.jar,{lcgview}/lib/accsoft/dependency/jersey-json-1.9.jar,{lcgview}/lib/accsoft/dependency/jersey-server-1.9.jar,{lcgview}/lib/accsoft/dependency/jets3t-0.7.1.jar,{lcgview}/lib/accsoft/dependency/jettison-1.1.jar,{lcgview}/lib/accsoft/dependency/jetty-6.1.26.cloudera.4.jar,{lcgview}/lib/accsoft/dependency/jetty-sslengine-6.1.26.cloudera.4.jar,{lcgview}/lib/accsoft/dependency/jetty-util-6.1.26.cloudera.4.jar,{lcgview}/lib/accsoft/dependency/joni-2.1.2.jar,{lcgview}/lib/accsoft/dependency/jsch-0.1.42.jar,{lcgview}/lib/accsoft/dependency/jsp-2.1-6.1.14.jar,{lcgview}/lib/accsoft/dependency/jsp-api-2.1-6.1.14.jar,{lcgview}/lib/accsoft/dependency/leveldbjni-all-1.8.jar,{lcgview}/lib/accsoft/dependency/log4j-1.2-api-2.6.2.jar,{lcgview}/lib/accsoft/dependency/log4j-api-2.6.2.jar,{lcgview}/lib/accsoft/dependency/log4j-core-2.6.2.jar,{lcgview}/lib/accsoft/dependency/log4j-slf4j-impl-2.6.2.jar,{lcgview}/lib/accsoft/dependency/metrics-core-2.2.0.jar,{lcgview}/lib/accsoft/dependency/netty-all-4.0.23.Final.jar,{lcgview}/lib/accsoft/dependency/paranamer-2.7.jar,{lcgview}/lib/accsoft/dependency/protobuf-java-2.5.0.jar,{lcgview}/lib/accsoft/dependency/scala-library-2.11.8.jar,{lcgview}/lib/accsoft/dependency/slf4j-api-1.7.21.jar,{lcgview}/lib/accsoft/dependency/snappy-java-1.1.1.3.jar,{lcgview}/lib/accsoft/dependency/stax-api-1.0-2.jar,{lcgview}/lib/accsoft/dependency/xmlenc-0.52.jar,{lcgview}/lib/accsoft/dependency/xz-1.5.jar,{lcgview}/lib/accsoft/dependency/zookeeper-3.4.5-cdh5.7.5.jar'.format(lcgview = os.environ['LCG_VIEW']))\n", - "conf.set('spark.driver.extraJavaOptions', '-Dservice.url=http://cs-ccr-nxcalsstr1.cern.ch:19093')\n", - "conf.set('spark.driver.memory', '8g')\n", - "sc = SparkContext(conf = conf)\n", - "spark = SparkSession(sc)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "sc.parallelize(range(1,1000)).count()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "import time\n", - "b=sc.broadcast([3,5]) #Creating a broadcast variable available on all executors\n", - "a=sc.accumulator(0) #Creating an accumulator for adding values across executors\n", - "RDD0=sc.parallelize([y for y in range(0,11)]) #RDD from input python collection\n", - "RDD2=sc.parallelize([z for z in range(0,10)])\n", - "RDD1=RDD0.cartesian(RDD2) \n", - "cached=RDD2.cache() #Testing cached RDD\n", - "RDD22=RDD1.map(lambda x:x[0]+x[1]+b.value[0])\n", - "RDD3=RDD22.repartition(7) # To trigger a new stage.\n", - 
"RDD4=RDD2.map(lambda x: 3*x-b.value[0])\n", - "RDD5=RDD3.filter(lambda x:x%2==0)\n", - "RDD6=RDD4.filter(lambda x:x%2!=0)\n", - "RDD7=RDD5.cartesian(RDD6)\n", - "RDD8=RDD7.flatMap(lambda x: [x[i] for i in range(0,2)])\n", - "RDD9=RDD8.union(cached)\n", - "ans=RDD9.reduce(lambda x,y: x+y) # Doing a simple sum on the random data.\n", - "print(ans)\n", - "def f(x):\n", - " global a\n", - " time.sleep(0.1) #Making the job run a little longer\n", - " # print(1/0)\n", - " a+=x\n", - "RDD9.foreach(f)\n", - "print(a.value)\n", - "\n", - "#Display should appear automatically" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "sc.parallelize(range(0,100)).count()\n", - "sc.parallelize(range(0,200)).count()\n", - "sc.parallelize(range(0,300)).count()\n", - "sc.parallelize(range(0,400)).count()\n", - "sc.parallelize(range(0,500)).count()\n", - "sc.parallelize(range(0,600)).count()\n", - "sc.parallelize(range(0,700)).count()\n", - "sc.parallelize(range(0,800)).count()\n", - "sc.parallelize(range(0,900)).count()\n", - "sc.parallelize(range(0,1000)).count()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [ - "sc.stop()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python [default]", - "language": "python", - "name": "python2" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 2 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.13" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/swanimage.Dockerfile b/swanimage.Dockerfile deleted file mode 100644 index df765116..00000000 --- a/swanimage.Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -FROM cernphsft/systemuser:test - -#Possible fix for serverextension not loading?? -RUN sudo pip3 install jupyter_nbextensions_configurator - -ADD ./extension/ /extension/ -ADD ./notebooks/ /notebooks/ - -RUN which pip3; pip3 install /extension/ -RUN pip2 install /extension/ - -#RUN /usr/local/bin/jupyter serverextension enable sparkmonitor --user --py -#RUN sudo /usr/local/bin/jupyter serverextension enable sparkmonitor --py -#RUN sudo /usr/local/bin/jupyter serverextension enable sparkmonitor --sys-prefix --py - -ADD systemuser.sh /srv/singleuser/systemuser.sh \ No newline at end of file diff --git a/systemuser.sh b/systemuser.sh deleted file mode 100644 index 69fdf42f..00000000 --- a/systemuser.sh +++ /dev/null @@ -1,172 +0,0 @@ -#!/bin/sh - -# Author: Danilo Piparo, Enric Tejedor 2016 -# Copyright CERN -# Here the environment for the notebook server is prepared. Many of the commands are launched as regular -# user as it's this entity which is able to access eos and not the super user. - -# Create notebook user -# The $HOME directory is specified upstream in the Spawner -echo "Creating user $USER ($USER_ID) with home $HOME" -export SWAN_HOME=$HOME -if [[ $SWAN_HOME == /eos/user/* ]]; then export CERNBOX_HOME=$SWAN_HOME; fi -useradd -u $USER_ID -s $SHELL -d $SWAN_HOME $USER -#mkdir -p $SWAN_HOME/SWAN_projects/ -SCRATCH_HOME=/scratch/$USER -mkdir -p $SCRATCH_HOME -echo "This directory is temporary and will be deleted when your SWAN session ends!" 
> $SCRATCH_HOME/IMPORTANT.txt -chown -R $USER:$USER $SCRATCH_HOME - -# Setup the LCG View on CVMFS -echo "Setting up environment from CVMFS" -export LCG_VIEW=$ROOT_LCG_VIEW_PATH/$ROOT_LCG_VIEW_NAME/$ROOT_LCG_VIEW_PLATFORM - -# Set environment for the Jupyter process -echo "Setting Jupyter environment" -JPY_DIR=$SCRATCH_HOME/.jupyter -mkdir -p $JPY_DIR -JPY_LOCAL_DIR=$SCRATCH_HOME/.local -mkdir -p $JPY_LOCAL_DIR -export JUPYTER_CONFIG_DIR=$JPY_DIR -# Our kernels will be in $JPY_LOCAL_DIR/share/jupyter, $LCG_VIEW/share/jupyter is needed for the notebook extensions -export JUPYTER_PATH=$JPY_LOCAL_DIR/share/jupyter:$LCG_VIEW/share/jupyter -export KERNEL_DIR=$JPY_LOCAL_DIR/share/jupyter/kernels -mkdir -p $KERNEL_DIR -export JUPYTER_RUNTIME_DIR=$JPY_LOCAL_DIR/share/jupyter/runtime -export IPYTHONDIR=$SCRATCH_HOME/.ipython -# This avoids to create hardlinks on eos when using pip -export XDG_CACHE_HOME=/tmp/$USER/.cache/ -JPY_CONFIG=$JUPYTER_CONFIG_DIR/jupyter_notebook_config.py -echo "c.FileCheckpoints.checkpoint_dir = '$SCRATCH_HOME/.ipynb_checkpoints'" >> $JPY_CONFIG -echo "c.NotebookNotary.db_file = '$JPY_LOCAL_DIR/share/jupyter/nbsignatures.db'" >> $JPY_CONFIG -echo "c.NotebookNotary.secret_file = '$JPY_LOCAL_DIR/share/jupyter/notebook_secret'" >> $JPY_CONFIG -#echo "c.NotebookApp.extra_template_paths = ['/srv/singleuser/swan-templates']" >> $JPY_CONFIG -#echo "c.NotebookApp.contents_manager_class = 'swancontents.swanfilemanager.SwanFileManager'" >> $JPY_CONFIG -cp -L -r $LCG_VIEW/etc/jupyter/* $JUPYTER_CONFIG_DIR - -# Configure %%cpp cell highlighting -CUSTOM_JS_DIR=$JPY_DIR/custom -mkdir $CUSTOM_JS_DIR -echo " -require(['notebook/js/codecell'], function(codecell) { - codecell.CodeCell.options_default.highlight_modes['magic_text/x-c++src'] = {'reg':[/^%%cpp/]}; -}); -" > $CUSTOM_JS_DIR/custom.js - -# Configure kernels and terminal -# The environment of the kernels and the terminal will combine the view and the user script (if any) -echo "Configuring kernels and terminal" -# Python (2 or 3) -if [ -f $LCG_VIEW/bin/python3 ]; then PYVERSION=3; else PYVERSION=2; fi -PYKERNELDIR=$KERNEL_DIR/python$PYVERSION -cp -r /usr/local/share/jupyter/kernelsBACKUP/python2 $PYKERNELDIR -echo "{ - \"display_name\": \"Python $PYVERSION\", - \"language\": \"python\", - \"argv\": [ - \"python$PYVERSION\", - \"-m\", - \"ipykernel\", - \"-f\", - \"{connection_file}\" - ] -}" > $PYKERNELDIR/kernel.json -# ROOT -cp -rL $LCG_VIEW/etc/notebook/kernels/root $KERNEL_DIR -sed -i "s/python/python$PYVERSION/g" $KERNEL_DIR/root/kernel.json # Set Python version in kernel -# R -cp -rL $LCG_VIEW/share/jupyter/kernels/* $KERNEL_DIR -sed -i "s/IRkernel::main()/options(bitmapType='cairo');IRkernel::main()/g" $KERNEL_DIR/ir/kernel.json # Force cairo for graphics - -chown -R $USER:$USER $JPY_DIR $JPY_LOCAL_DIR -export SWAN_ENV_FILE=/tmp/swan.sh -sudo -E -u $USER sh -c ' source $LCG_VIEW/setup.sh \ - && if [[ $SPARK_CLUSTER_NAME ]]; \ - then \ - echo "Configuring environment for Spark cluster: $SPARK_CLUSTER_NAME"; \ - source $SPARK_CONFIG_SCRIPT $SPARK_CLUSTER_NAME; \ - export SPARK_LOCAL_IP=`hostname -i`; \ - wget -P $SWAN_HOME https://raw.githubusercontent.com/etejedor/Spark-Notebooks/master/SWAN-Spark_NXCALS_Example.ipynb; \ - export PYTHONPATH=/scratch/$USER/lib:$PYTHONPATH; \ - mkdir -p /scratch/$USER/lib; \ - cp -r /usr/local/lib/python2.7/site-packages/ipykernel /scratch/$USER/lib; \ - cp -r /usr/local/lib/python2.7/site-packages/IPython /scratch/$USER/lib; \ - - cp -r /usr/local/lib/python2.7/site-packages/sparkmonitor 
/scratch/$USER/lib; \ - cp -r /usr/local/lib/python2.7/site-packages/bs4 /scratch/$USER/lib; \ - cp -r /notebooks/ $SWAN_HOME; \ - jupyter nbextension install --symlink --user --py sparkmonitor; \ - jupyter nbextension enable --user --py sparkmonitor; \ - ipython profile create; \ - echo "c.InteractiveShellApp.extensions.append('\''sparkmonitor'\'')" >> $(ipython profile locate default)/ipython_kernel_config.py; \ - fi \ - && export JUPYTER_DATA_DIR=$LCG_VIEW/share/jupyter \ - && export TMP_SCRIPT=`mktemp` \ - && if [[ $USER_ENV_SCRIPT && -f `eval echo $USER_ENV_SCRIPT` ]]; \ - then \ - echo "Found user script: $USER_ENV_SCRIPT"; \ - export TMP_SCRIPT=`mktemp`; \ - cat `eval echo $USER_ENV_SCRIPT` > $TMP_SCRIPT; \ - source $TMP_SCRIPT; \ - else \ - echo "Cannot find user script: $USER_ENV_SCRIPT"; \ - fi \ - && cd $KERNEL_DIR \ - && python -c "import os; kdirs = os.listdir(\"./\"); \ - kfile_names = [\"%s/kernel.json\" %kdir for kdir in kdirs]; \ - kfile_contents = [open(kfile_name).read() for kfile_name in kfile_names]; \ - exec(\"def addEnv(dtext): d=eval(dtext); d[\\\"env\\\"]=dict(os.environ); return d\"); \ - kfile_contents_mod = map(addEnv, kfile_contents); \ - import json; \ - print kfile_contents_mod; \ - map(lambda d: open(d[0],\"w\").write(json.dumps(d[1])), zip(kfile_names,kfile_contents_mod)); \ - termEnvFile = open(\"$SWAN_ENV_FILE\", \"w\"); \ - [termEnvFile.write(\"export %s=\\\"%s\\\"\\n\" % (key, val)) if key != \"SUDO_COMMAND\" else None for key, val in dict(os.environ).iteritems()];"' - - -# Spark configuration -if [[ $SPARK_CLUSTER_NAME ]] -then - LOCAL_IP=`hostname -i` - echo "$LOCAL_IP $SERVER_HOSTNAME" >> /etc/hosts -fi - -# Make sure we have a sane terminal -printf "export TERM=xterm\n" >> $SWAN_ENV_FILE - -# If there, source users' .bashrc after the SWAN environment -BASHRC_LOCATION=$SWAN_HOME/.bashrc -printf "if [[ -f $BASHRC_LOCATION ]]; -then - source $BASHRC_LOCATION -fi\n" >> $SWAN_ENV_FILE - -if [ $? -ne 0 ] -then - echo "Error setting the environment for kernels" - exit 1 -fi - -# Set the terminal environment -export SWAN_BASH=/bin/swan_bash -printf "#! /bin/env python\nfrom subprocess import call\nimport sys\ncall([\"bash\", \"--rcfile\", \"$SWAN_ENV_FILE\"]+sys.argv[1:])\n" >> $SWAN_BASH -chmod +x $SWAN_BASH - -#echo "--------------------------------" -#jupyter nbextension install /srv/singleuser/jupyter-share/ --sys-prefix -#jupyter nbextension enable jupyter-share/notebook --sys-prefix -#cd /srv/singleuser/swan-contents -#python3 setup.py install -#echo "--------------------------------" - -# Run notebook server -echo "Running the notebook server" -sudo -E -u $USER sh -c ' cd $SWAN_HOME \ - && SHELL=$SWAN_BASH jupyterhub-singleuser \ - --port=8888 \ - --ip=0.0.0.0 \ - --user=$JPY_USER \ - --cookie-name=$JPY_COOKIE_NAME \ - --base-url=$JPY_BASE_URL \ - --hub-prefix=$JPY_HUB_PREFIX \ - --hub-api-url=$JPY_HUB_API_URL'
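Note on the sparkmonitor.py hunk near the top of this patch: the else: branch is cut off in the excerpt, so its body is not visible here. Below is a hedged sketch of the conf-injection flow that the hunk extends; configure() is a stand-in for the module's own helper, the listener class name is SparkMonitor's documented spark.extraListeners value, and the fallback branch is an assumption.

# Hedged sketch of the conf-injection flow extended by the sparkmonitor.py hunk.
import logging
from pyspark import SparkConf

logger = logging.getLogger('sparkscalamonitor')

def configure(conf):
    # Stand-in for the module's configure(conf): attach the monitoring
    # listener via spark.extraListeners.
    conf.set('spark.extraListeners',
             'sparkmonitor.listener.JupyterSparkMonitorListener')

def inject_conf(ipython):
    """Find a SparkConf named 'conf' in the user namespace, else create one."""
    conf = ipython.user_ns.get('conf')
    if conf:
        logger.info('Conf: ' + conf.toDebugString())
    if isinstance(conf, SparkConf):
        configure(conf)
    else:
        conf = SparkConf()            # assumption: create a fresh conf
        configure(conf)
        ipython.push({'conf': conf})  # expose it to the notebook user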