diff --git a/.gitignore b/.gitignore index 76a46f9944..05e34d377e 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ /notes /jsdoc/template/tmpl /jsdoc/template/static +/images/* diff --git a/.travis.yml b/.travis.yml index 25fb64a47c..b63ffc0cfa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,12 +23,11 @@ cache: directories: - node_modules - "$HOME/cache" - - "$TRAVIS_BUILD_DIR/_build/ExternalData" + - "$TRAVIS_BUILD_DIR/dist/data" before_install: # Start xvfb with a specific resolution and pixel depth - "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x24" - - CACHE="${HOME}/cache" CMAKE_VERSION=3.5.0 CMAKE_SHORT_VERSION=3.5 source ./scripts/install_cmake.sh - npm install -g npm@latest # Prune the npm packages. If this fails for any reason, just remove them all - npm prune || rm -r node_modules @@ -43,37 +42,27 @@ install: - npm list || true # Install for the website build - npm run setup-website - # Download and extract the glslangValidator tool - - curl -OL https://github.com/KhronosGroup/glslang/releases/download/master-tot/glslang-master-linux-Release.zip - - unzip glslang-master-linux-Release.zip bin/glslangValidator before_script: - export DISPLAY=:99.0 script: - # Use the glslangValidator to validate all .frag and .vert files - - find . \( -name '*.frag' \) -exec sh -c 'for n; do python scripts/preprocess_glsl.py "$n" | bin/glslangValidator --stdin -S frag || exit 1; done' sh {} \+ - - find . \( -name '*.vert' \) -exec sh -c 'for n; do python scripts/preprocess_glsl.py "$n" | bin/glslangValidator --stdin -S vert || exit 1; done' sh {} \+ - - npm run build - - npm run docs - - mkdir -p _build - - ctest -S cmake/travis_build.cmake -VV || true - - if [ -f _build/test_failed ] ; then false ; fi + - npm run ci # Build the website to ensure that it will pass - - npm run build-website + - npm run ci-build-website after_failure: # Upload test results. First make them smaller with optipng. 
- pip install --user --upgrade 'girder-client<3.1' requests[security] pyOpenSSL six - - find _build/images -name '*-test.png' -exec optipng {} \+ || true - - find _build/images -name '*-test.png' -exec python scripts/upload_travis_results.py {} \+ || true + - find images -name '*-test.png' -exec optipng {} \+ || true + - find images -name '*-test.png' -exec python scripts/upload_travis_results.py {} \+ || true # Upload build artifacts - find dist/built -type f -exec python scripts/upload_travis_results.py {} \+ || true # Generate a new set of baseline images, in case we decide the new results # are now correct - rm -r dist/data/base-images 2>/dev/null >/dev/null || true - - python tests/runners/baseline_images.py -cgvb _build - - find _build -name 'Baseline Images*.tgz' -exec python scripts/upload_travis_results.py {} \+ || true + - python tests/runners/baseline_images.py -cgvb images + - find images -name 'Baseline Images*.tgz' -exec python scripts/upload_travis_results.py {} \+ || true after_success: # - find dist \( -name '*coverage*xml' -o -name 'lcov.info' \) -exec ls -al {} \+ || true diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index 6e1c34909d..0000000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,193 +0,0 @@ -cmake_minimum_required(VERSION 2.8) - -project(geojs NONE) - -include(CTest) -enable_testing() - -set(BUILD_TESTING ON CACHE BOOL "Enable geojs testing") -set(HEADLESS_TESTS ON CACHE BOOL "Generate headless browser unit tests.") -set(WEBGLHEADLESS_TESTS ON CACHE BOOL "Generate headless WebGL unit tests (requires xvfb to be running).") -set(TEST_SAVE_IMAGE "none" CACHE STRING "Save headless test images even if there aren't errors. Valid options are none, all, or the a comma-separated list of test names.") -set(ESLINT_TESTS ON CACHE BOOL "Generate eslint style tests for JS source files.") - -site_name(HOSTNAME) - -set(TESTING_HOST "${HOSTNAME}" CACHE STRING "The host to connect to for unit tests") -set(TESTING_PORT 30100 CACHE STRING "The port number to use for the testing web server") -mark_as_advanced(TESTING_PORT) - -list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") - -set(NOTES_PATH "${CMAKE_CURRENT_BINARY_DIR}/build_notes.json") - -# set the path where webpack builds the sources -set(GEOJS_DEPLOY_DIR "${CMAKE_CURRENT_SOURCE_DIR}/dist") - -find_program(NPM_EXECUTABLE npm) -if(NOT NPM_EXECUTABLE) - message(FATAL_ERROR "npm is required for many tests") -endif() - -# Generate notes to send along with the test reports -add_test( - NAME "notes-reset" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "./tests/notes.js" "reset" "${CMAKE_CURRENT_BINARY_DIR}/notes" "${NOTES_PATH}" -) - -add_test( - NAME "notes-report" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "./tests/notes.js" "combine" "${CMAKE_CURRENT_BINARY_DIR}/notes" "${NOTES_PATH}" -) -set_property(TEST "notes-report" APPEND PROPERTY DEPENDS "notes-reset") - -add_test( - NAME "total-coverage" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run combine-coverage -) -set_property(TEST "notes-report" APPEND PROPERTY DEPENDS "notes-reset") - -# Use ExternalData to download all referenced files in tests/external-data -include(ExternalData) -set(ExternalData_URL_TEMPLATES "https://data.kitware.com/api/v1/file/hashsum/%(algo)/%(hash)/download") -set(ExternalData_BINARY_ROOT "${GEOJS_DEPLOY_DIR}/data") -set(ExternalData_SOURCE_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/tests/external-data") -# Get a list of files to download based on the name 
*.sha512 -file(GLOB data_files_list "${ExternalData_SOURCE_ROOT}/*.sha512") -# Obtain the files using ExternalData -foreach(data_file ${data_files_list}) - string(REGEX REPLACE "^(.*)\\.sha512$" "\\1" base_file "${data_file}") - ExternalData_Expand_Arguments(data_files tmp DATA{${base_file}}) -endforeach() -# Add a target to get the files -ExternalData_Add_Target(data_files) -# For any file that is a .tgz file, expand it after we get it -foreach(data_file ${data_files_list}) - string(REGEX REPLACE "^(.*)\\.sha512$" "\\1" base_file "${data_file}") - get_filename_component(base_file "${base_file}" NAME) - get_filename_component(base_fileext "${base_file}" EXT) - if("${base_fileext}" STREQUAL ".tgz") - get_filename_component(base_filename "${base_file}" NAME_WE) - add_custom_command( - TARGET data_files POST_BUILD - DEPENDS "${ExternalData_BINARY_ROOT}/${base_file}" - COMMAND ${CMAKE_COMMAND} -E make_directory "${ExternalData_BINARY_ROOT}/${base_filename}" - COMMAND ${CMAKE_COMMAND} -E chdir "${ExternalData_BINARY_ROOT}/${base_filename}" tar zxf "${ExternalData_BINARY_ROOT}/${base_file}" - ) - endif() -endforeach() -# Create a test that gets this target -add_test(NAME get_data_files COMMAND "${CMAKE_COMMAND}" --build "${CMAKE_BINARY_DIR}" --target data_files) - -if(HEADLESS_TESTS) - add_test( - NAME "headless" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run test - ) - set_property(TEST "headless" APPEND PROPERTY ENVIRONMENT "CTEST_NOTES_PATH=${CMAKE_CURRENT_BINARY_DIR}/notes") - set_property(TEST "headless" APPEND PROPERTY DEPENDS "notes-reset") - set_property(TEST "headless" APPEND PROPERTY DEPENDS "get_data_files") - set_property(TEST "notes-report" APPEND PROPERTY DEPENDS "headless") - set_property(TEST "total-coverage" APPEND PROPERTY DEPENDS "headless") - - add_test( - NAME "tutorials" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run tutorialsci - ) - set_property(TEST "tutorials" APPEND PROPERTY DEPENDS "build_tutorials") - set_property(TEST "total-coverage" APPEND PROPERTY DEPENDS "tutorials") -endif() - -add_custom_command(OUTPUT "${GEOJS_DEPLOY_DIR}/examples/bundle.js" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run build-examples - COMMENT "Build examples" - VERBATIM -) -add_custom_target(examples DEPENDS "${GEOJS_DEPLOY_DIR}/examples/bundle.js") -add_test(NAME build_examples COMMAND "${CMAKE_COMMAND}" --build ${CMAKE_BINARY_DIR} --target examples) -set_property(TEST "build_examples" APPEND PROPERTY DEPENDS "get_data_files") - -add_custom_command(OUTPUT "${GEOJS_DEPLOY_DIR}/tutorials/bundle.js" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run build-tutorials - COMMENT "Build tutorials" - VERBATIM -) -add_custom_target(tutorials DEPENDS "${GEOJS_DEPLOY_DIR}/tutorials/bundle.js") -add_test(NAME build_tutorials COMMAND "${CMAKE_COMMAND}" --build ${CMAKE_BINARY_DIR} --target tutorials) -set_property(TEST "build_tutorials" APPEND PROPERTY DEPENDS "get_data_files") - -if(WEBGLHEADLESS_TESTS) - add_test( - NAME "webglheadless" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run glci - ) - set_property(TEST "webglheadless" APPEND PROPERTY ENVIRONMENT "CTEST_IMAGE_PATH=${CMAKE_CURRENT_BINARY_DIR}/images") - set_property(TEST "webglheadless" APPEND PROPERTY ENVIRONMENT "TEST_SAVE_IMAGE=${TEST_SAVE_IMAGE}") - set_property(TEST "total-coverage" APPEND PROPERTY DEPENDS "webglheadless") - set_property(TEST "webglheadless" 
APPEND PROPERTY DEPENDS "get_data_files") - # We use some of the example images in the webglheadless tests - set_property(TEST "webglheadless" APPEND PROPERTY DEPENDS "build_examples") - - add_test( - NAME "headed" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" run headedci - ) - set_property(TEST "headed" APPEND PROPERTY ENVIRONMENT "CTEST_IMAGE_PATH=${CMAKE_CURRENT_BINARY_DIR}/images") - set_property(TEST "headed" APPEND PROPERTY ENVIRONMENT "TEST_SAVE_IMAGE=${TEST_SAVE_IMAGE}") - set_property(TEST "total-coverage" APPEND PROPERTY DEPENDS "headed") - set_property(TEST "headed" APPEND PROPERTY DEPENDS "build_examples") - - configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/tests/runners/baseline_images.py" - "${CMAKE_CURRENT_BINARY_DIR}/test/baseline_images.py" - COPYONLY - ) -endif() - -add_custom_command(OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/base-images.tgz" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - # Make sure we have the data files. - COMMAND "${CMAKE_COMMAND}" --build ${CMAKE_BINARY_DIR} --target data_files - # But remove the old baselines - COMMAND rm -rf "${ExternalData_BINARY_ROOT}/base-images" - # Run the webglheadless test, asking to save all images - COMMAND TEST_SAVE_IMAGE=all "${NPM_EXECUTABLE}" run glci - # Build examples to make sure that they are available. - COMMAND "${NPM_EXECUTABLE}" run build-examples - # Run the headed test, asking to save all images - COMMAND TEST_SAVE_IMAGE=all "${NPM_EXECUTABLE}" run headedci - # If optipng is available, reduce the size of the images - # COMMAND bash -c "optipng '${CMAKE_CURRENT_BINARY_DIR}'/images/*.png || true" - COMMAND bash -c "find '${CMAKE_CURRENT_BINARY_DIR}'/images -name '*.png' -a -not -name '*-test.png' -a -not -name '*-diff.png' -a -not -name '*-base.png' -a -not -name '*-screen.png' -print0 | xargs -0 -n 1 -P 8 optipng || true" - # Make a tarball of all of the images - COMMAND tar -zcvf "${CMAKE_CURRENT_BINARY_DIR}/base-images.tgz" --exclude=*-test.png --exclude=*-diff.png --exclude=*-base.png --exclude=*-screen.png -C "${CMAKE_CURRENT_BINARY_DIR}/images" . - COMMENT "Create baseline images, then tar them into a single file" - VERBATIM -) - -add_custom_target(baseline_images DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/base-images.tgz") - -add_test(NAME baseline_images CONFIGURATIONS "baseline_images" COMMAND "${CMAKE_COMMAND}" --build ${CMAKE_BINARY_DIR} --target baseline_images) -set_property(TEST "baseline_images" APPEND PROPERTY ENVIRONMENT "CTEST_IMAGE_PATH=${CMAKE_CURRENT_BINARY_DIR}/images") - -if(${ESLINT_TESTS}) - add_test( - NAME "eslint" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" "run" "lint" - ) - add_test( - NAME "puglint" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - COMMAND "${NPM_EXECUTABLE}" "run" "puglint" - ) -endif() # ESLINT_TESTS diff --git a/CTestConfig.cmake b/CTestConfig.cmake deleted file mode 100644 index 9bc45fd7cf..0000000000 --- a/CTestConfig.cmake +++ /dev/null @@ -1,20 +0,0 @@ -## This file should be placed in the root directory of your project. -## Then modify the CMakeLists.txt file in the root directory of your -## project to incorporate the testing dashboard. 
-## -## # The following are required to submit to the CDash dashboard: -## ENABLE_TESTING() -## INCLUDE(CTest) - -set(CTEST_PROJECT_NAME "geojs") -set(CTEST_NIGHTLY_START_TIME "00:00:00 EST") - -set(CTEST_DROP_METHOD "http") -set(CTEST_DROP_SITE "my.cdash.org") -set(CTEST_DROP_LOCATION "/submit.php?project=geojs") -set(CTEST_DROP_SITE_CDASH TRUE) - -if(DEFINED CTEST_BINARY_DIRECTORY AND NOT EXISTS "${CTEST_BINARY_DIRECTORY}/build_notes.json") - file(WRITE "${CTEST_BINARY_DIRECTORY}/build_notes.json" "") -endif() -set(CTEST_NOTES_FILES "${CTEST_BINARY_DIRECTORY}/build_notes.json") diff --git a/cmake/travis_build.cmake b/cmake/travis_build.cmake deleted file mode 100644 index c7010cde77..0000000000 --- a/cmake/travis_build.cmake +++ /dev/null @@ -1,26 +0,0 @@ -set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}") -set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/_build") - -include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake) -set(CTEST_SITE "Travis") -set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}") -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") -set(coverage_file "${CTEST_SOURCE_DIRECTORY}/dist/cobertura/cobertura-coverage.xml") - -ctest_start("Continuous") -ctest_configure() -ctest_build() -ctest_test(PARALLEL_LEVEL 1 RETURN_VALUE res) -if(EXISTS "${coverage_file}") - file(COPY "${coverage_file}" DESTINATION "${CTEST_BINARY_DIRECTORY}") - file(RENAME ${CTEST_BINARY_DIRECTORY}/cobertura-coverage.xml ${CTEST_BINARY_DIRECTORY}/coverage.xml) - ctest_coverage() - file(REMOVE ${CTEST_BINARY_DIRECTORY}/coverage.xml) -endif() -ctest_submit() - -file(REMOVE "${CTEST_BINARY_DIRECTORY}/test_failed") -if(NOT res EQUAL 0) - file(WRITE "${CTEST_BINARY_DIRECTORY}/test_failed" "error") - message(FATAL_ERROR "Test failures occurred.") -endif() diff --git a/dashboard/github_service/README.md b/dashboard/github_service/README.md deleted file mode 100644 index 0f3f1f4635..0000000000 --- a/dashboard/github_service/README.md +++ /dev/null @@ -1,82 +0,0 @@ -Github dashboard services -========================= - -This is some basic (and terse) instructions for setting up a dashboard -server that responds to Github webhooks for running dashboard tests -on demand. - -You will need to choose several parameters that will be used in the -configuration. - -Variable | Description ------------|------------ -`PORT` | Local web server port -`SUBDOMAIN`| ngrok subdomain -`WEBROOT` | Path to this directory - - -Setup ------ - -1. Install [MongoDB](http://www.mongodb.org/). -2. Install [ngrok](https://ngrok.com/) -3. Create a free ngrok account. -4. Install python requirements, `pip install -r requirements.txt`. - - -Create a personal access token and webhook token ------------------------------------------------- - -On github, under account settings -> Applications, generate a new -personal access token with `repo:status` scope. In your repository -settings page under Webhooks & Services, add a webhook with payload URL, -`http://${SUBDOMAIN}.ngrok.com/main` and a secret token. Now save these -tokens in a file called `~/.geojs_dashboard_config.json`. -``` -{ - "dashboard_key": "", - "hook_key": "" -} -``` - - -Configure your system ---------------------- - -Start tangelo: -``` -tangelo --port ${PORT} --root ${WEBROOT} start -``` - -Create your ngrok config with authentication information as -described at [https://ngrok.com/dashboard](https://ngrok.com/dashboard), -then modify `~/.ngrok` to contain the following: -``` -tunnels: - github: - subdomain: "${SUBDOMAIN}" - proto: - http: "${PORT}" -``` - -Start your ngrok tunnel. 
-``` -ngrok -log=stdout start github >& ngrok.log -``` - -Test that everything is working. -``` ->>> curl http://${SUBDOMAIN}.ngrok.com/main -I hear you! -``` - - -What now?! ----------- - -Your system is now receiving webhooks from Github. All pushes are being queued in -a mongo database `geojs_dashboard` in the collection `queue`. The `main.py` script -also acts as test runner that will run all tests currently queued. Once you have -an item in your queue, you can run the test with `python main.py`. The test will -run, then it will submit the results to cdash, update the commit status in github -and delete the queue item. diff --git a/dashboard/github_service/dashboard.py b/dashboard/github_service/dashboard.py deleted file mode 100644 index 1e2840fa85..0000000000 --- a/dashboard/github_service/dashboard.py +++ /dev/null @@ -1,235 +0,0 @@ -#!/usr/bin/env python - -import os -import shutil -import socket -from datetime import datetime -import subprocess as sp -import json - -from pymongo import MongoClient - - -_ctest = ''' -set(CTEST_SOURCE_DIRECTORY "{source}") -set(CTEST_BINARY_DIRECTORY "{build}") - -include(${{CTEST_SOURCE_DIRECTORY}}/CTestConfig.cmake) -set(CTEST_SITE "{site}") -set(CTEST_BUILD_NAME "{name}") -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") - -ctest_start("Experimental") -ctest_configure() -ctest_build() -ctest_test(PARALLEL_LEVEL 1 RETURN_VALUE res) -ctest_submit() - -if(NOT res EQUAL 0) - message(FATAL_ERROR "Test failures occurred.") -endif() -''' - -_host = socket.gethostname().split('.')[0] - - -def config(): - return { - 'mongo-host': 'lusitania', - 'mongo-port': 27017, - 'mongo-database': 'geojs_dashboard', - 'test-dir': '~/geojs-testing', - 'repo': 'https://github.com/OpenGeoscience/geojs.git', - 'kill-server': '/Users/jbeezley/bin/killtestserver', - 'add-path': '/usr/local/bin', - 'cmake': '/usr/local/bin/cmake', - 'ctest': '/usr/local/bin/ctest', - 'git': '/usr/local/bin/git' - } - - -def _communicate(cmd, **kw): - cfg = config() - pth = os.environ.get('PATH', '') - if cfg.get('add-path'): - pth = cfg['add-path'] + ':' + pth - kw['stderr'] = sp.STDOUT - kw['stdout'] = sp.PIPE - kw['shell'] = True - p = sp.Popen( - '/usr/bin/env PATH=' + pth + ' ' + cmd, - **kw - ) - out, err = p.communicate() - return p.returncode, out - - -def run_test(repo, commit, testdir, branch): - - cfg = config() - git = cfg.get('git', 'git') - cmake = cfg.get('cmake', 'cmake') - ctest = cfg.get('ctest', 'ctest') - print cmake - # ====================== - # git clone and checkout - # ====================== - s, out = _communicate(' '.join([ - git, 'clone', - '--recursive', - repo, testdir - ])) - if s != 0: - return (False, 'clone "%s" failed' % repo, out) - - s, out = _communicate(' '.join([ - git, - '-C', testdir, - 'checkout', - commit - ])) - if s != 0: - return (False, 'checkout "%s" failed' % commit, out) - - s, out = _communicate(' '.join([ - git, - '-C', testdir, - 'submodule', 'update' - ])) - if s != 0: - return (False, 'submodule update failed', out) - - # ========= - # configure - # ========= - builddir = os.path.join(testdir, '_build') - os.makedirs(builddir) - s, out = _communicate( - ' '.join([ - cmake, - '-D', 'CHROME_TESTS=OFF', - '-D', 'FIREFOX_TESTS=ON', - '-D', 'COVERAGE_TESTS=OFF', - '..' 
- ]), - cwd=builddir - ) - if s != 0: - return (False, 'cmake configure failed', out) - - # ============== - # build and test - # ============== - build_script = os.path.join(builddir, 'build.cmake') - kw = { - 'source': testdir, - 'build': builddir, - 'site': _host, - 'name': branch + '-' + commit[:6] - } - open(build_script, 'w').write( - _ctest.format(**kw) - ) - s, out = _communicate( - ctest + ' -VV -S {}'.format(build_script), - cwd=builddir - ) - test_result = s - test_output = out - - if test_result != 0: - return (False, 'Test(s) failed', test_output) - - return (True, 'All tests passed!', test_output) - - -def start_test(item, oldTest=None): - if oldTest: - status = { - 'pass': oldTest['status']['pass'], - 'output': oldTest['status']['output'], - 'reason': 'Already tested in branch %s' % oldTest['branch'], - 'skipped': True - } - else: - cfg = config() - basedir = os.path.expanduser(cfg['test-dir']) - testdir = os.path.join(basedir, item['commit']) - shutil.rmtree(testdir, ignore_errors=True) - try: - os.makedirs(testdir) - except OSError: - pass - result = run_test(cfg['repo'], item['commit'], testdir, item['branch']) - - status = { - 'pass': result[0], - 'reason': result[1], - 'output': result[2], - 'skipped': False - } - - return status - - -def notify(item, status): - ''' - Do something to notify people, not sure what. - ''' - pass - - -def nightly(queue, results): - for item in queue.find(): - oldTest = results.find_one({'commit': item['commit']}) - status = start_test(item, oldTest) - if not oldTest: - result = dict(item) - result.pop('_id') - result['time'] = datetime.now() - result['status'] = status - results.insert(result) - queue.remove(item) - notify(item, status) - - -def continuous(sha, branch, user, queue, results): - oldTest = results.find_one({'commit': sha}) - item = { - 'commit': sha, - 'user': user, - 'branch': branch, - 'time': datetime.now() - } - status = start_test(item, oldTest) - if not oldTest: - result = dict(item) - result['time'] = datetime.now() - result['status'] = status - results.insert(result) - notify(item, status) - return status - - -def main(*args): - cfg = config() - cl = MongoClient( - host=cfg['mongo-host'], - port=cfg['mongo-port'], - ) - db = cl[cfg['mongo-database']] - queue = db['queue'] - results = db['results'] - - if cfg.get('kill-server'): - sp.call(cfg['kill-server'], shell=True) - - if not len(args) or args[0] == 'nightly': - nightly(queue, results) - else: - return continuous(*args[:3], queue=queue, results=results) - - -if __name__ == '__main__': - import sys - print json.dumps(main(*sys.argv[1:]), indent=4) diff --git a/dashboard/github_service/index.html b/dashboard/github_service/index.html deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/dashboard/github_service/main.py b/dashboard/github_service/main.py deleted file mode 100644 index e7d0d2b7e6..0000000000 --- a/dashboard/github_service/main.py +++ /dev/null @@ -1,245 +0,0 @@ -#!/usr/bin/env python - -import os -import sys -from datetime import datetime -import json -import hmac -import hashlib - -import pymongo -import tangelo -import requests -import cherrypy - -import dashboard - -global _geojs_test_mongo - -_cdash_url = 'http://my.cdash.org/index.php?project=geojs' -_geojs_test_mongo = None -_github_api = 'https://api.github.com' -_geojs_owner = 'OpenGeoscience' -_geojs_repo = 'geojs' -_auth_token = os.environ.get('GEOJS_DASHBOARD_KEY') -_secret_key = os.environ.get('GEOJS_HOOK_KEY') -try: - _config = json.loads( - open( - 
os.path.expanduser('~/.geojs_dashboard_config.json'), - 'r' - ).read() - ) - _auth_token = _config['dashboard_key'] - _secret_key = _config['hook_key'] -except Exception: - pass - -if not _auth_token or not _secret_key: - raise Exception('GEOJS_DASHBOARD_KEY and GEOJS_HOOK_KEY required.') - - -def mongo_client(): - ''' - Get a global reference to the mongo client. - ''' - global _geojs_test_mongo - if _geojs_test_mongo is None or not _geojs_test_mongo.alive(): - _geojs_test_mongo = pymongo.MongoClient() - return _geojs_test_mongo - - -def mongo_database(): - ''' - Return the database containing the queue collection. - ''' - return mongo_client()['geojs_dashboard'] - - -def add_push(obj): - ''' - Add a push to the test queue. - ''' - db = mongo_database() - - # get the branch name w/o refs/heads - branch = '/'.join(obj['ref'].split('/')[2:]) - - # get the new commit hash - commit = obj['after'] - if commit == '0' * 40: - # ignore branch deletions - return - - # get the username of the person who pushed the branch - user = obj['pusher']['name'] - - # set a time stamp - timestamp = datetime.now() - - # check if the hash has already been tested - tested = db['results'] - if tested.find_one({'hash': commit}): - return - - # queue the commit for testing - queue = db['queue'] - context = branch + '/geojs_dashboard' - item = { - 'branch': branch, - 'commit': commit, - 'user': user, - 'time': timestamp, - 'context': context - } - queue.update({'context': context}, item, upsert=True) - - # set the status of the tip of the push to pending - url = '/'.join(( - _github_api, - 'repos', - _geojs_owner, - _geojs_repo, - 'statuses', - commit - )) - data = json.dumps({ - 'state': 'pending', - 'target_url': _cdash_url, - 'context': context, - 'description': 'running dashboard tests' - }) - resp = requests.post( - url, - auth=(_auth_token, 'x-oauth-basic'), - data=data - ) - if not resp.ok: - print >> sys.stderr("Could not set pending status") - - -def run_test(obj): - ''' - Runs a test from a queue object. After the test is run, - sets the status on github to the result. - ''' - branch = obj['branch'] - context = obj['context'] - commit = obj['commit'] - user = obj['user'] - url = '/'.join(( - _github_api, - 'repos', - _geojs_owner, - _geojs_repo, - 'statuses', - commit - )) - - # run the dashboard test locally - try: - status = dashboard.main( - commit, - branch, - user - ) - except Exception as e: - # something went wrong in the dashboard, so set the - # status to error and exit - data = json.dumps({ - 'state': 'error', - 'target_url': _cdash_url, - 'context': context, - 'description': 'Dashboard failure detected: ' + str(e) - }) - requests.post( - url, - auth=(_auth_token, 'x-oauth-basic'), - data=data - ) - return - - # set status - if status['pass']: - data = json.dumps({ - 'state': 'success', - 'target_url': _cdash_url, # can we get the actual url of the test from cdash? - 'context': context, - 'description': 'All geojs dashboard tests passed!' - }) - requests.post( - url, - auth=(_auth_token, 'x-oauth-basic'), - data=data - ) - else: - data = json.dumps({ - 'state': 'failure', - 'target_url': _cdash_url, # can we get the actual url of the test from cdash? - 'context': context, - 'description': status['reason'] - }) - requests.post( - url, - auth=(_auth_token, 'x-oauth-basic'), - data=data - ) - - -@tangelo.restful -def get(*arg, **kwarg): - ''' - Just to make sure the server is listening. - ''' - return 'I hear you!' 
- - -@tangelo.restful -def post(*arg, **kwarg): - ''' - This is the main listener for github webhooks. - ''' - - # retrieve the headers from the request - # headers = tangelo.request_headers() # <- not merged - headers = cherrypy.request.headers - - # get the request body as a dict - body = tangelo.request_body() - s = body.read() - - # make sure this is a valid request coming from github - computed_hash = hmac.new(str(_secret_key), s, hashlib.sha1).hexdigest() - received_hash = headers.get('X-Hub-Signature', 'sha1=')[5:] - if not hmac.compare_digest(computed_hash, received_hash): - return tangelo.HTTPStatusCode(403, "Invalid signature") - - try: - obj = json.loads(s) - except: - return tangelo.HTTPStatusCode(400, "Could not load json object.") - - if headers['X-Github-Event'] == 'push': - # add a new item to the test queue - add_push(obj) - else: - return tangelo.HTTPStatusCode(400, "Unhandled event") - - return 'OK' - - -def main(): - ''' - On commandline call, get all queued tests, run them, and set the status. - ''' - - db = mongo_database() - queue = db['queue'] - - for item in queue.find(): - run_test(item) - queue.remove(item) - - -if __name__ == '__main__': - main() diff --git a/dashboard/github_service/requirements.txt b/dashboard/github_service/requirements.txt deleted file mode 100644 index c5d4ed2b93..0000000000 --- a/dashboard/github_service/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -tangelo -requests -pymongo diff --git a/docs/conf.py b/docs/conf.py index 89cf41bf81..f3d0164c92 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -89,7 +89,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +# exclude_patterns = ['images'] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/docs/developers.rst b/docs/developers.rst index 8b4b5d2f52..cd6acabdad 100644 --- a/docs/developers.rst +++ b/docs/developers.rst @@ -22,17 +22,15 @@ checked against `ESLint `_ for uniform styling and strict for common errors patterns. The style rules for geojs are located in the ``.eslintrc`` file in the root of the repository. These tests are preformed automatically for every file added to the build; no -additional configuration is required. You can run a quick check of the -code style outside of CMake by running ``npm run lint``. +additional configuration is required. You can check the code style +by running ``npm run lint``. Code coverage ------------- -Code coverage information is generated automatically for all headless unit tests -by Karma's test runner when running ``npm run test``. The coverage information is -submitted to `codecov `_ and -`cdash `_ after every -successful Travis run. +Code coverage information is generated automatically for all headless unit +tests by Karma's test runner when running ``npm run test``. The coverage +information is submitted to `codecov `_ after every successful CI run. Headless browser testing ------------------------ @@ -67,19 +65,15 @@ Headless WebGL testing ---------------------- To fully test code that uses WebGL, a browser with WebGL is required. -If xvfb, osmesa, and Firefox are installed, some tests can be run in a virtual -frame buffer that doesn't require a display. May of these tests depend on -additional data which can be downloaded by using CMake and running ctest. +If xvfb, osmesa, and Firefox or Chrome are installed, some tests can be run in +a virtual frame buffer that doesn't require a display. 
Many of these tests +depend on additional data which can be downloaded by ``npm run get-data-files``. For example, running :: - cmake /path/to/geojs - make - xvfb-run -s '-ac -screen 0 1280x1024x24' ctest -VV -R ffheadless + npm run test-headed-xvfb -will run the headless WebGL tests. After the data for tests is downloaded, -the tests can also be run via ``npm run test-webgl``, which assumes that -``xvfb-run`` is available. +will run the headless WebGL tests. The headless unit tests that require WebGL should be placed in the ``tests/gl-cases/`` directory. When tests are run in a normal browser via @@ -98,16 +92,7 @@ resemblejs, the default highlight color is pink). Unless an image comparison test fails, images are not automatically saved. To save all images, add the environment variable ``TEST_SAVE_IMAGE=all`` to the -test command or set this parameter in CMake. - -.. note:: - - Typically, CMake is used to build outside of the source tree. This - means you would create a new directory somewhere and point cmake - to the geojs source directory. You may need to rerun ``cmake`` and - ``make`` after making changes to your code for everything to - build correctly. Try running ``ccmake /path/to/geojs`` for a full - list of configuration options. +test command. Examples and tests that need to run in a standard browser should be tested by creating an entry in the ``tests/headed-cases/`` directory. To run these tests diff --git a/docs/provisioning.rst b/docs/provisioning.rst index 5ef736edd8..0c1cb18886 100644 --- a/docs/provisioning.rst +++ b/docs/provisioning.rst @@ -4,33 +4,50 @@ Provisioning for Development .. _ubuntu-development: -Ubuntu 18.04 ------------- +Ubuntu 20.04 +------------ -This shows how to set up a build and test environment in Ubuntu 18.04. +This shows how to set up a build and test environment in Ubuntu 20.04. -These instructions will probably work for any Ubuntu release from 18.04 -onward. They assume a basic installation. +These instructions will probably work for other Ubuntu releases. They assume a +basic installation. Add nodejs to the sources so it can be installed :: - wget -qO- https://deb.nodesource.com/setup_8.x | sudo bash - + wget -qO- https://deb.nodesource.com/setup_15.x | sudo bash - Install required packages (you may want to also include cmake-curses-gui for convenience in configuring CMake options) :: sudo apt-get install --yes \ - cmake \ + cpio \ firefox-esr \ + fonts-dejavu \ git \ imagemagick \ - libjpeg-dev \ - libpango1.0-dev \ mesa-utils \ nodejs \ - python-pip \ + optipng \ + software-properties-common \ + unzip \ xauth \ - xvfb + xvfb \ + # these packages are needed for Chrome \ + fonts-liberation \ + libappindicator3-1 \ + libasound2 \ + libgbm1 \ + libnspr4 \ + libnss3 \ + libxss1 \ + libxtst6 \ + xdg-utils + +Install Chrome :: + + export CHROME_SOURCE_URL=https://dl.google.com/dl/linux/direct/google-chrome-stable_current_amd64.deb && \ + wget --no-verbose -O /tmp/$(basename $CHROME_SOURCE_URL) $CHROME_SOURCE_URL && \ + dpkg -i /tmp/$(basename $CHROME_SOURCE_URL) Checkout the GeoJS source and change to the source directory ::
- make - -Run the headless WebGL tests :: - - ctest -VV -R headless - -Run all tests :: - - xvfb-run -s '-ac -screen 0 1280x1024x24' ctest --output-on-failure +Build the website :: + + npm run setup-website + npm run build-website Install python packages :: diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 95f6a3f4f9..f59beed63f 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -15,7 +15,6 @@ The following software is required to build geojs from source: For testing and development, the following additional software is required: * `Python 3 `_ -* `CMake `_ In addition, the following python modules are recommended for development and testing of geojs. @@ -31,8 +30,7 @@ For testing WebGL in a headless environment, the additional packages are needed: * `Firefox `_ For an example on how to install all packages for a specific OS, see -:ref:`Ubuntu 14.04 Provisioning `. - +:ref:`Ubuntu Provisioning `. Getting the source code ----------------------- diff --git a/karma-base.js b/karma-base.js index 6a798c11f1..c4a9ae6ce0 100644 --- a/karma-base.js +++ b/karma-base.js @@ -2,17 +2,13 @@ var webpack_config = require('./webpack.base.config'); var url = require('url'); var fs = require('fs'); var path = require('path'); -var notes_path = process.env.CTEST_NOTES_PATH || path.resolve('notes'); -var image_path = process.env.CTEST_IMAGE_PATH || path.resolve('images'); +var image_path = process.env.TEST_IMAGE_PATH || path.resolve('_build/images'); var test_case = process.env.GEOJS_TEST_CASE || 'tests/all.js'; var getRawBody = require('raw-body'); -// Create the notes directory, if it doesn't exist. -if (!fs.existsSync(notes_path)) { - fs.mkdirSync(notes_path); -} +// Create the images directory, if it doesn't exist. if (!fs.existsSync(image_path)) { - fs.mkdirSync(image_path); + fs.mkdirSync(image_path, {recursive: true}); } /** @@ -112,41 +108,14 @@ function compareImage(name, image, threshold, callback) { } /** - * Express style middleware to handle REST requests to `/notes` and - * `/testImage` on the test server. + * Express style middleware to handle REST requests to `/testImage` on the test + * server. 
*/ -var notes_middleware = function (config) { - var notes = {}; - +var testimage_middleware = function (config) { return function (request, response, next) { var parsed = url.parse(request.url, true); var query = (parsed.query || {}); - var key = query.key || 'default'; - if (parsed.pathname === '/notes') { - if (request.method === 'PUT') { - return getRawBody(request, {encoding: 'utf-8'}).then(function (body) { - body = body.toString() || '{}'; - notes[key] = JSON.parse(body); - fs.writeFileSync(path.resolve(notes_path, key) + '.json', body); - response.writeHead(200); - return response.end('{}'); - }).catch(function (err) { - response.writeHead(500); - response.end(err.message); - }); - } else if (request.method === 'POST' && query.length) { - fs.writeFileSync(query.path || 'notes.txt', JSON.stringify(notes)); - response.writeHead(200); - return response.end('{}'); - } else if (request.method === 'DELETE') { - notes = {}; - response.writeHead(200); - return response.end('{}'); - } else if (request.method === 'GET') { - response.writeHead(200); - return response.end(JSON.stringify(notes)); - } - } else if (parsed.pathname === '/testImage') { + if (parsed.pathname === '/testImage') { if (request.method === 'PUT') { return getRawBody(request).then(function (body) { var name = query.name; @@ -348,11 +317,11 @@ module.exports = function (config) { // We could suppress passing results // specReporter = {suppressPassed: true, suppressSkipped: true}, middleware: [ - 'notes', + 'testimage', 'osmtiles' ], plugins: [ - {'middleware:notes': ['factory', notes_middleware]}, + {'middleware:testimage': ['factory', testimage_middleware]}, {'middleware:osmtiles': ['factory', osmtiles_middleware]}, 'karma-*' ], diff --git a/karma-cov.conf.js b/karma-cov.conf.js index 272cc840c6..8117504c9b 100644 --- a/karma-cov.conf.js +++ b/karma-cov.conf.js @@ -35,7 +35,7 @@ module.exports = function (config) { {type: 'cobertura', dir: 'dist/cobertura/', file: 'coverage.xml', subdir: subdir_name}, {type: 'json', dir: 'dist/coverage/json/', subdir: subdir_name}, {type: 'lcovonly', dir: 'dist/coverage/lcov', subdir: subdir_name}, - {type: 'text'} + {type: 'text-summary'} ] }; /* Alter our first webpack module rule which should just apply to src/*.js diff --git a/package-lock.json b/package-lock.json index cd9ad2830e..2bba6e456a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -519,6 +519,23 @@ "integrity": "sha512-NTBIIbAfkJeIletyABbVtdPgeKfDafR+1mZV/AyyfC1UkVkp9iUjV+wwmqtUgphHYajbI86jejBJp5e+jkGTiQ==", "dev": true }, + "archive-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz", + "integrity": "sha1-+S5yIzBW38aWlHJ0nCZ72wRrHXA=", + "dev": true, + "requires": { + "file-type": "^4.2.0" + }, + "dependencies": { + "file-type": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", + "integrity": "sha1-G2AOX8ofvcboDApwxxyNul95BsU=", + "dev": true + } + } + }, "are-we-there-yet": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", @@ -1876,6 +1893,16 @@ "file-uri-to-path": "1.0.0" } }, + "bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dev": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, "blob": { "version": "0.0.5", "resolved": 
"https://registry.npmjs.org/blob/-/blob-0.0.5.tgz", @@ -1944,12 +1971,6 @@ "integrity": "sha512-0hL4A8OUiqABgPipGrojf/hyhr5RS257xCNARlbK34HaMfhV5fXvwEooN4/ri9+jgX47J4Wg24ZPmfZ2xD2cKw==", "dev": true }, - "bowser": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/bowser/-/bowser-1.9.4.tgz", - "integrity": "sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ==", - "dev": true - }, "boxen": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/boxen/-/boxen-1.3.0.tgz", @@ -3550,6 +3571,30 @@ "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", "dev": true }, + "decompress": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", + "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", + "dev": true, + "requires": { + "decompress-tar": "^4.0.0", + "decompress-tarbz2": "^4.0.0", + "decompress-targz": "^4.0.0", + "decompress-unzip": "^4.0.1", + "graceful-fs": "^4.1.10", + "make-dir": "^1.0.0", + "pify": "^2.3.0", + "strip-dirs": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, "decompress-response": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", @@ -3559,6 +3604,85 @@ "mimic-response": "^2.0.0" } }, + "decompress-tar": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", + "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", + "dev": true, + "requires": { + "file-type": "^5.2.0", + "is-stream": "^1.1.0", + "tar-stream": "^1.5.2" + } + }, + "decompress-tarbz2": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", + "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", + "dev": true, + "requires": { + "decompress-tar": "^4.1.0", + "file-type": "^6.1.0", + "is-stream": "^1.1.0", + "seek-bzip": "^1.0.5", + "unbzip2-stream": "^1.0.9" + }, + "dependencies": { + "file-type": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", + "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==", + "dev": true + } + } + }, + "decompress-targz": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", + "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", + "dev": true, + "requires": { + "decompress-tar": "^4.1.1", + "file-type": "^5.2.0", + "is-stream": "^1.1.0" + } + }, + "decompress-unzip": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", + "integrity": "sha1-3qrM39FK6vhVePczroIQ+bSEj2k=", + "dev": true, + "requires": { + "file-type": "^3.8.0", + "get-stream": "^2.2.0", + "pify": "^2.3.0", + "yauzl": "^2.4.2" + }, + "dependencies": { + "file-type": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", + "integrity": "sha1-JXoHg4TR24CHvESdEH1SpSZyuek=", + "dev": true + }, + "get-stream": { + "version": "2.3.1", + "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", + "integrity": "sha1-Xzj5PzRgCWZu4BUKBUFn+Rvdld4=", + "dev": true, + "requires": { + "object-assign": "^4.0.1", + "pinkie-promise": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, "deep-equal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.0.3.tgz", @@ -3837,6 +3961,68 @@ "is-obj": "^1.0.0" } }, + "download": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/download/-/download-8.0.0.tgz", + "integrity": "sha512-ASRY5QhDk7FK+XrQtQyvhpDKanLluEEQtWl/J7Lxuf/b+i8RYh997QeXvL85xitrmRKVlx9c7eTrcRdq2GS4eA==", + "dev": true, + "requires": { + "archive-type": "^4.0.0", + "content-disposition": "^0.5.2", + "decompress": "^4.2.1", + "ext-name": "^5.0.0", + "file-type": "^11.1.0", + "filenamify": "^3.0.0", + "get-stream": "^4.1.0", + "got": "^8.3.1", + "make-dir": "^2.1.0", + "p-event": "^2.1.0", + "pify": "^4.0.1" + }, + "dependencies": { + "file-type": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-11.1.0.tgz", + "integrity": "sha512-rM0UO7Qm9K7TWTtA6AShI/t7H5BPjDeGVDaNyg9BjHAj3PysKy7+8C8D137R88jnR3rFJZQB/tFgydl5sN5m7g==", + "dev": true + }, + "get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + } + } + }, "download-stats": { "version": "0.3.4", "resolved": "https://registry.npmjs.org/download-stats/-/download-stats-0.3.4.tgz", @@ -4962,6 +5148,25 @@ } } }, + "ext-list": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", + "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", + "dev": true, + "requires": { + "mime-db": "^1.28.0" + } + }, + "ext-name": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", + "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", + "dev": true, + "requires": { + "ext-list": "^2.0.0", + "sort-keys-length": "^1.0.0" + } + }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -5420,6 +5625,12 @@ "schema-utils": "^1.0.0" } }, + "file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": 
"sha1-LdvqfHP/42No365J3DOMBYwritY=", + "dev": true + }, "file-uri-to-path": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", @@ -5442,6 +5653,23 @@ "integrity": "sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY=", "dev": true }, + "filename-reserved-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", + "integrity": "sha1-q/c9+rc10EVECr/qLZHzieu/oik=", + "dev": true + }, + "filenamify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-3.0.0.tgz", + "integrity": "sha512-5EFZ//MsvJgXjBAFJ+Bh2YaCTRF/VP1YOmGrgt+KJ4SFRLjI87EIdwLLuT6wQX0I4F9W41xutobzczjsOKlI/g==", + "dev": true, + "requires": { + "filename-reserved-regex": "^2.0.0", + "strip-outer": "^1.0.0", + "trim-repeated": "^1.0.0" + } + }, "fileset": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/fileset/-/fileset-0.2.1.tgz", @@ -5785,6 +6013,12 @@ "null-check": "^1.0.0" } }, + "fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true + }, "fs-extra": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", @@ -6290,6 +6524,33 @@ } } }, + "glslang-validator-prebuilt": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/glslang-validator-prebuilt/-/glslang-validator-prebuilt-0.0.6.tgz", + "integrity": "sha512-kd6UxEwoWWxjpzV8QZuaILnNygkbR7QZ/X/82Gpm8+r1+1xlkrJz8ZeZ5nlVzEPSWNzP+ydKUGeTiSE6IKxZCA==", + "dev": true, + "requires": { + "decompress": "^4.2.1", + "download": "^8.0.0", + "mkdirp": "^1.0.4", + "pify": "^5.0.0", + "temp-dir": "^2.0.0" + }, + "dependencies": { + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true + }, + "pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==", + "dev": true + } + } + }, "got": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", @@ -7233,6 +7494,12 @@ "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==", "dev": true }, + "is-natural-number": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", + "integrity": "sha1-q5124dtM7VHjXeDHLr7PCfc0zeg=", + "dev": true + }, "is-negative-zero": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.0.tgz", @@ -10627,6 +10894,15 @@ "p-reduce": "^1.0.0" } }, + "p-event": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-2.3.1.tgz", + "integrity": "sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA==", + "dev": true, + "requires": { + "p-timeout": "^2.0.1" + } + }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", @@ -13146,6 +13422,23 @@ "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==", "optional": true }, + "seek-bzip": { + 
"version": "1.0.6", + "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", + "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", + "dev": true, + "requires": { + "commander": "^2.8.1" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + } + } + }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -13706,6 +13999,15 @@ "is-plain-obj": "^1.0.0" } }, + "sort-keys-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", + "integrity": "sha1-nLb09OnkgVWmqgZx7dM2/xR5oYg=", + "dev": true, + "requires": { + "sort-keys": "^1.0.0" + } + }, "source-list-map": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", @@ -14112,6 +14414,15 @@ "strip-bom": "^2.0.0" } }, + "strip-dirs": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", + "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", + "dev": true, + "requires": { + "is-natural-number": "^4.0.1" + } + }, "strip-eof": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", @@ -14139,6 +14450,15 @@ "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", "dev": true }, + "strip-outer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz", + "integrity": "sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.2" + } + }, "style-loader": { "version": "0.23.1", "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-0.23.1.tgz", @@ -14356,6 +14676,21 @@ } } }, + "tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dev": true, + "requires": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + } + }, "temp": { "version": "0.8.4", "resolved": "https://registry.npmjs.org/temp/-/temp-0.8.4.tgz", @@ -14365,6 +14700,12 @@ "rimraf": "~2.6.2" } }, + "temp-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz", + "integrity": "sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==", + "dev": true + }, "term-size": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz", @@ -14487,6 +14828,12 @@ "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=", "dev": true }, + "to-buffer": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", + "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==", + "dev": true + }, "to-fast-properties": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz", @@ -14569,6 +14916,15 @@ 
"integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", "dev": true }, + "trim-repeated": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", + "integrity": "sha1-42RqLqTokTEr9+rObPsFOAvAHCE=", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.2" + } + }, "trim-right": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz", @@ -14756,6 +15112,28 @@ "integrity": "sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og==", "dev": true }, + "unbzip2-stream": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", + "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", + "dev": true, + "requires": { + "buffer": "^5.2.1", + "through": "^2.3.8" + }, + "dependencies": { + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + } + } + }, "underscore": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.11.0.tgz", diff --git a/package.json b/package.json index 43c041c070..e40c0cac1b 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,6 @@ "body-parser": "^1.19.0", "bootstrap": "^3.4.1", "bootswatch": "^3.4.1", - "bowser": "^1.9.4", "codemirror": "^5.54.0", "colorbrewer": "1.3.0", "css-loader": "^0.28.11", @@ -58,6 +57,7 @@ "forever": "^0.15.2", "fs-extra": "^7.0.0", "glob": "^7.1.6", + "glslang-validator-prebuilt": "0.0.6", "imports-loader": "^0.8.0", "istanbul-combine": "^0.3.0", "istanbul-instrumenter-loader": "^3.0.1", @@ -112,17 +112,23 @@ "build-website-tutorials": "node tutorials/build-website.js && webpack --config webpack-website-tutorials.config.js", "lint": "eslint --cache . --max-warnings=0", "puglint": "pug-lint src examples", - "test": "GEOJS_TEST_CASE=tests/test-unit.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch,PhantomJS", - "test-all": "GEOJS_TEST_CASE=tests/test-unit.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch,PhantomJS", + "glsllint": "find . \\( -name '*.frag' \\) -exec sh -c 'for n; do python scripts/preprocess_glsl.py \"$n\" | node_modules/glslang-validator-prebuilt/bin/glslangValidator --stdin -S frag || exit 1; done' sh {} \\+ && find . 
\\( -name '*.vert' \\) -exec sh -c 'for n; do python scripts/preprocess_glsl.py \"$n\" | node_modules/glslang-validator-prebuilt/bin/glslangValidator --stdin -S vert || exit 1; done' sh {} \\+",
+    "test-headless": "GEOJS_TEST_CASE=tests/test-unit.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch,PhantomJS",
+    "test-headless-all": "GEOJS_TEST_CASE=tests/test-unit.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch,PhantomJS",
+    "test-headed": "GEOJS_TEST_CASE=tests/test-headed.js karma start karma-cov.conf.js --single-run --browsers ChromeFull",
+    "test-headed-all": "GEOJS_TEST_CASE=tests/test-headed.js karma start karma-cov.conf.js --single-run --browsers ChromeFull,FirefoxWithProxy",
+    "test-headed-xvfb": "GEOJS_TEST_CASE=tests/test-headed.js xvfb-run -s '-ac -screen 0 1280x1024x24' karma start karma-cov.conf.js --single-run --browsers ChromeFull",
+    "test-webglheadless": "GEOJS_TEST_CASE=tests/test-gl.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch",
+    "test-webglheadless-all": "GEOJS_TEST_CASE=tests/test-gl.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch",
+    "test-tutorials": "GEOJS_TEST_CASE=tests/tutorials.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,PhantomJS",
+    "test-tutorials-all": "GEOJS_TEST_CASE=tests/tutorials.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch,PhantomJS",
+    "get-data-files": "node scripts/datastore.js dist/data",
+    "ci-clean": "git clean -fxd dist -e dist/data jsdoc/tmpl jsdoc/static images lcov",
+    "ci": "npm run ci-build && npm run ci-test",
+    "ci-build": "(webpack --config webpack.config.js & webpack --config webpack-lean.config.js & npm run docs & npm run get-data-files & npm run lint & npm run puglint & npm run glsllint & npm run build-examples & npm run build-tutorials & wait)",
+    "ci-test": "npm run test-headless && npm run test-headed && npm run test-webglheadless && npm run test-tutorials && npm run combine-coverage",
+    "ci-xvfb": "npm run ci-clean && npm run ci-build && TEST_SAVE_IMAGE='all' xvfb-run -s '-ac -screen 0 1280x1024x24' npm run ci-test",
     "start": "karma start karma.conf.js",
-    "glci": "GEOJS_TEST_CASE=tests/test-gl.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch",
-    "glci-all": "GEOJS_TEST_CASE=tests/test-gl.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch",
-    "headedci": "GEOJS_TEST_CASE=tests/test-headed.js karma start karma-cov.conf.js --single-run --browsers ChromeFull",
-    "headedci-all": "GEOJS_TEST_CASE=tests/test-headed.js karma start karma-cov.conf.js --single-run --browsers ChromeFull,FirefoxWithProxy",
-    "tutorialsci": "GEOJS_TEST_CASE=tests/tutorials.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,PhantomJS",
-    "tutorialsci-all": "GEOJS_TEST_CASE=tests/tutorials.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch,FirefoxHeadlessTouch,PhantomJS",
-    "test-webgl": "GEOJS_TEST_CASE=tests/test-gl.js karma start karma-cov.conf.js --single-run --browsers ChromeHeadlessTouch",
-    "test-headed": "GEOJS_TEST_CASE=tests/test-headed.js xvfb-run -s '-ac -screen 0 1280x1024x24' karma start karma-cov.conf.js --single-run --browsers ChromeFull",
     "combine-coverage": "istanbul-combine -d dist/cobertura -r cobertura 'dist/coverage/json/**/coverage-final.json'",
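The renamed test-* and ci-* scripts above take over what the deleted CMake/CTest targets used to drive. Assuming Chrome, Firefox, and xvfb are installed as described in docs/provisioning.rst, a typical local run might look like:

    npm run get-data-files   # fetch the test data formerly downloaded by the CMake ExternalData step
    npm run ci-xvfb          # ci-clean, then ci-build, then ci-test under xvfb with TEST_SAVE_IMAGE=all

``npm run ci`` is the entry point the Travis configuration now calls; ``ci-xvfb`` wraps the same build and test steps for machines without a display.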
"examples": "node examples/build.js && webpack --config webpack-examples.config.js && node tutorials/build.js && webpack --config webpack-tutorials.config.js && node ./tests/runners/server.js --host \"${HOST-}\" --port ${PORT-8082} --dist", "start-test": "node examples/build.js; forever start ./tests/runners/server.js", @@ -130,7 +136,8 @@ "docs": "jsdoc --pedantic -d dist/apidocs -r src package.json -c jsdoc.conf.json", "website": "cd website && npx hexo server", "setup-website": "cd website && npm install", - "build-website": "npm run build && cp -a dist/built/. website/source/built && npm run build-website-examples && npm run build-website-tutorials && npm run docs && cd dist && find data \\( -name tiles -o -name base-images -o -name '*-hash-stamp' -o -name '*.tgz' \\) -prune -o \\( -print0 \\) | cpio -pmdL0 ../website/source && cp -ar apidocs/. ../website/source/apidocs && cd ../website && npm install && rm -f db.json && npx hexo generate", + "build-website": "npm run ci-build && npm run ci-build-website", + "ci-build-website": "cp -a dist/built/. website/source/built && (npm run build-website-examples & npm run build-website-tutorials & wait) && cd dist && find data \\( -name tiles -o -name base-images -o -name '*-hash-stamp' -o -name '*.tgz' \\) -prune -o \\( -print0 \\) | cpio -pmdL0 ../website/source && cp -ar apidocs/. ../website/source/apidocs && cd ../website && npm install && rm -f db.json && npx hexo generate", "prepublishOnly": "webpack --config webpack.config.js && webpack --config webpack-lean.config.js && cp dist/built/*.js ." }, "keywords": [ diff --git a/scripts/datastore.js b/scripts/datastore.js new file mode 100644 index 0000000000..db26118a29 --- /dev/null +++ b/scripts/datastore.js @@ -0,0 +1,89 @@ +var crypto = require('crypto'); +var fs = require('fs'); +var https = require('https'); +var path = require('path'); +var tar = require('tar'); + +var registry = { + 'AdderallCities2015.csv': 'c3e984482cc6db1193a6dca2a55396a2baad8541a5c8c679f33750b76f766f40a119ec3e63abbabcd095d752c3df8ce21bf24cbe629502121f24ba90b4b0674c', + 'base-images.tgz': '346dcdcf6e88aa9bfde684a311a452431af01b055f007e73839d42aa19ce0846af20d0bc296f7e1acca0af2759896d647dbbdbf07b20428f0e10464a1764c77e', + 'blue.jpg': '867b1f3c568289efc7d0dba97d827a2bc4d83a7465cebcb3b5aec7bac6a38cf70d037d1814402bc97ad1f2f6737cfb5ce97db0a4fb53a716e77fd3ba57a7ab3b', + 'cities.csv': '5a665e5feda24f28e5cf4ed0801b67e73bbcf3ea781b2e50d11284214e67b25b68e6a1c48da46e5e4d4d0c54c2ec18f88d292224b4541fb279396cf7b94beac9', + 'earthquakes.json': 'f098b6437411384b552419b4a36264c1bb3fed816ccfe9545145175e0b92a0b7ad5ebdcb9dddd0a12a90499143ffa471c02f6e049be5b973db607ff066892500', + 'earthquakes-video.webm': '834a9d05f5fb00145e529fa4b217398db0df188c69d751f3869128b6e9c92d3000f85378752c56d9d9b5fa0870437dd9bdfeb5d62f6c87c2c03a7f1a20ee8523', + 'grid.jpg': '60d201a14c7d31e7881301e6784e0372ddf27f26e5e4eafba9ba39158dfd050e3683faaa660fcde47e6c994dd3ee64c5a5231474ca75090053ef9207fedd9029', + 'hurricanes.json': '012f15036bfc9ac3abb81a2a61e2c7b602ef3d8f7bd3b3f0fb66972ee69034730655f69febb2df5d657f22cd2b1e69170f6568bcbb03d7ebdecdfbafb80cc3b7', + 'land_polygons.json': '30a828392d58678599130e0dca6d7f27e7e07e4e5b5d7f7a37871eb395d53d97b76134c0a07e805fbdfac0f42e6d3ca6e287c9727815a9dc711d541b1c8f68a1', + 'land_shallow_topo_2048.png': '8a8330dba5bacdb511038ad0f6ee5a764a40aa7a8868a445749f653ae5d85d8317684ac706e7a9f049590170df6bc3fefc2912d52124d1b3b17aa43c529ff2a8', + 'noaa_prcp.json': 
'07b4e12f0a31c0f48ca42545e61324941be7df24bd521541250969dd3f14f4400a362601ea9ecb4220d9d3b731f01d75cf9a998682c43afbc63cc4a16c2cba2e', + 'oahu-dense.json': '692a44ec4a18b16d1530403a9a2bf286ff2ef7d45fa58f555c278f91a0fa708b5626a3f38955e06a11cdc06a1009e859328687c1f32a2169e0c8ef1b518418c1', + 'oahu.json': 'e44282c44fa95f0b40c2135ee94ff40a755771f3b4bf9acda0eef0048cd0fb29b3a71352e2eecdca6cfc35fafde96ffb1a9658c9fee2346071808a9123c26cb5', + 'oahu-medium.json': '83375c5c2678b11d8de7f59fd8e2f9b889d3ec20f4a81279d996711bfc0942bd9dcdce149cbca995930459c26b3bf58be60569687085adf361ff805436b75aab', + 'red.jpg': '70884f5c3d1747633412dcf64d4886d985b49f508b68eb9f3874b2fdcfad38e7623f4a7c7355baa1656503f46e2e091576a19ded9fad7c7c65387c313206d2e6', + 'roads.json': 'ae8d8b99c3fab73798ddcc246ea53b1bc8d598414d00df4ce697373a343fbf7ffed4dbae5f07a997ac7731b5f86140686b8425b99bd5818b8c9ec68e7c4d3315', + 'sample.json': '5d4b00559f17fac607eafe4a9bb933386ebd572fbe545e6138b8eafbbf510074a8ae398c7df0420a017218af83b18cc322633b77007b63a27dfea7a50db70244', + 'temperature_data.tsv': 'bde5cdad7c4518694393de83da938bcf5d485b41116b51f0fce4b462737992d2973f076c577040fa816873b196c6397244aa03736fc7750a0eea0ba5cbcc48e4', + 'tilefancy.png': '455effa59d421cbb73c6def815813063f1c649363de4007fa0de00cc2e0f24cba745c046266e5a83fc43b121a648017d509d7bf03c30fbee1448817e3f849683', + 'tiles.tgz': 'b2e8af36ffa81bab566f89f9f386d4175f43619972d288c10588f9af5d20080caaed1bf44f7cd51e410850242add0101ef3f5f053b84f82771978266348136a6', + 'white.jpg': 'ea7a9d7ff76775e742572f89e90cce1248ec99c33b2f486e0fa1d19ab461b87dff324533ecb186a3db14e40a3826da97b5d66566360a201228f60140b0e89942' +}; + +function download(url, dest) { + return new Promise((resolve, reject) => { + var file = fs.createWriteStream(dest); + https.get(url, function (response) { + response.pipe(file); + file.on('finish', function () { + file.close(() => resolve(dest)); + }); + }); + }); +} + +function checksumFile(hashName, filepath) { + return new Promise((resolve, reject) => { + const hash = crypto.createHash(hashName); + const stream = fs.createReadStream(filepath); + stream.on('error', err => reject(err)); + stream.on('data', chunk => hash.update(chunk)); + stream.on('end', () => resolve(hash.digest('hex'))); + }); +} + +function untgz(filepath) { + var dest = filepath.substr(0, filepath.length - 4); + fs.mkdirSync(dest, {recursive: true}); + return tar.x({file: filepath, cwd: dest}); +} + +var base_url = 'https://data.kitware.com/api/v1/file/hashsum/{algo}/{hashvalue}/download'; +var algo = 'sha512'; + +var dest = process.argv[2]; + +fs.mkdirSync(dest, {recursive: true}); +Object.entries(registry).forEach(async ([name, hash]) => { + var outputPath = path.join(dest, name); + var url = base_url.replace('{algo}', algo).replace('{hashvalue}', hash); + var downloaded = false; + for (var tries = 0; tries < 5; tries += 1) { + if (tries) { + console.log(`Downloading ${name}.`); + await download(url, outputPath); + downloaded = true; + } + if (fs.existsSync(outputPath)) { + var existingHash = await checksumFile(algo, outputPath); + if (hash.toLowerCase() === existingHash.toLowerCase()) { + break; + } + if (tries) { + console.log(`Checksum does not match for ${name}.`); + } + } + downloaded = false; + } + if (downloaded && name.endsWith('tgz')) { + await untgz(outputPath); + } +}); diff --git a/scripts/install_cmake.sh b/scripts/install_cmake.sh deleted file mode 100755 index f4356c6359..0000000000 --- a/scripts/install_cmake.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -PREFIX="$CACHE/cmake-$CMAKE_VERSION" 
-if [[ ! -f "$PREFIX/bin/cmake" || -n "$UPDATE_CACHE" ]] ; then - rm -fr "$PREFIX" - mkdir -p "$PREFIX" - curl -L "http://cmake.org/files/v${CMAKE_SHORT_VERSION}/cmake-${CMAKE_VERSION}-Linux-x86_64.tar.gz" | gunzip -c | tar -x -C "$PREFIX" --strip-components 1 -fi -export PATH="$PREFIX/bin:$PATH" diff --git a/scripts/upload_notes.py b/scripts/upload_notes.py deleted file mode 100644 index ca5b445052..0000000000 --- a/scripts/upload_notes.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import print_function - -import json -import socket -from datetime import datetime -import os -import argparse -from StringIO import StringIO -import sys -import uuid - - -def gather_info(repo_path='.'): - try: - import git - except ImportError: - print( - 'Please install GitPython (`pip install GitPython`)', - file=sys.stderr - ) - sys.exit(1) - - repo = git.Repo(repo_path) - try: - branch = repo.active_branch.name - except Exception: - branch = os.environ.get('TRAVIS_BRANCH') - - info = { - 'uuid': str(uuid.uuid4()), - 'build_outputs': [], - 'build_timestamp': datetime.now().isoformat(), - 'commit_timestamp': repo.head.commit.committed_datetime.isoformat(), - 'datasets': [], - 'git_branch': branch, - 'git_repo_url': 'git@github.com:OpenGeoscience/geojs.git', - 'git_sha': repo.head.commit.hexsha, - 'host': socket.gethostname(), - 'regeneration_command': 'npm run test', - 'vcs': 'git' - } - if os.environ.get('TRAVIS'): - info['travis_id'] = os.environ.get('TRAVIS_BUILD_ID') - info['host'] = 'travis' - return info - - -def upload(data, bucket='geojs-build-outputs'): - # assumes credentials coming from environment variables - # such as AWS_ACCESS_KEY_ID, AWS_PROFILE, etc. - try: - import boto3 - except ImportError: - print( - 'Please install boto3 (`pip install boto3`)', - file=sys.stderr - ) - sys.exit(1) - - s3 = boto3.client('s3') - - f = StringIO() - f.write(data) - f.seek(0) - - name = datetime.now().isoformat() + '.json' - s3.upload_fileobj(f, bucket, name) - - -def main(args): - notes = json.load(open(args.notes, 'r')) - info = gather_info(args.repo) - info['data'] = notes - info['submission_timestamp'] = datetime.now().isoformat() - - data = json.dumps(info) - print(data) - - if args.upload: - upload(data) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description='Collect performance information and related metadata' - ) - - parser.add_argument( - '--repo', - default='.', - help='Path of the repository' - ) - - parser.add_argument( - '--upload', - action='store_true', - default=False, - help='Upload results to s3' - ) - - parser.add_argument( - 'notes', - help='Path to the build notes file' - ) - - main(parser.parse_args()) diff --git a/tests/cases/osmLayer.js b/tests/cases/osmLayer.js index b6e0bda0df..6657bcffc1 100644 --- a/tests/cases/osmLayer.js +++ b/tests/cases/osmLayer.js @@ -9,7 +9,6 @@ describe('geo.core.osmLayer', function () { var map; var waitForIt = require('../test-utils').waitForIt; - var submitNote = require('../test-utils').submitNote; // var logCanvas2D = require('../test-utils').logCanvas2D; var geo = require('../test-utils').geo; var createMap = require('../test-utils').createMap; @@ -29,7 +28,7 @@ describe('geo.core.osmLayer', function () { restoreWebglRenderer(); } - /* Run some performance tests and submit them as a build note. + /* Run some performance tests. * * @param mapinfo: an object that includes the map to test. * @param notekey: the key to use for the build note. 
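One note on the download() helper in scripts/datastore.js above: it resolves once the write stream finishes and does not inspect the HTTP status or register error handlers, relying on the checksum/retry loop to catch bad downloads. If stricter error reporting were ever wanted, a variant along these lines could be swapped in (a sketch only, not part of this change):

// Sketch: a download() variant that also rejects on HTTP errors and stream
// failures; the surrounding checksum/retry loop would behave the same.
var fs = require('fs');
var https = require('https');

function downloadStrict(url, dest) {
  return new Promise(function (resolve, reject) {
    https.get(url, function (response) {
      if (response.statusCode !== 200) {
        response.resume();  // drain the response so the socket is freed
        reject(new Error('HTTP ' + response.statusCode + ' for ' + url));
        return;
      }
      var file = fs.createWriteStream(dest);
      response.pipe(file);
      file.on('error', reject);
      file.on('finish', function () {
        file.close(function () { resolve(dest); });
      });
    }).on('error', reject);
  });
}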
@@ -99,16 +98,10 @@ describe('geo.core.osmLayer', function () { done: done }); }); - it('report findings', function (done) { + it('check findings', function () { var timings = geo.util.timeReport('requestAnimationFrame'); - // very minimal test threshold; this is mostly to collect data + // very minimal test threshold expect(timings.count).toBeGreaterThan(10); - timings = $.extend({}, timings); - delete timings.recentsub; - submitNote(notekey, timings).then(function () { - geo.util.timeRequestAnimationFrame(true); - done(); - }); }); }); } diff --git a/tests/external-data/AdderallCities2015.csv.sha512 b/tests/external-data/AdderallCities2015.csv.sha512 deleted file mode 100644 index 446210cd7b..0000000000 --- a/tests/external-data/AdderallCities2015.csv.sha512 +++ /dev/null @@ -1 +0,0 @@ -c3e984482cc6db1193a6dca2a55396a2baad8541a5c8c679f33750b76f766f40a119ec3e63abbabcd095d752c3df8ce21bf24cbe629502121f24ba90b4b0674c diff --git a/tests/external-data/base-images.tgz.sha512 b/tests/external-data/base-images.tgz.sha512 deleted file mode 100644 index bb379de6ad..0000000000 --- a/tests/external-data/base-images.tgz.sha512 +++ /dev/null @@ -1 +0,0 @@ -346dcdcf6e88aa9bfde684a311a452431af01b055f007e73839d42aa19ce0846af20d0bc296f7e1acca0af2759896d647dbbdbf07b20428f0e10464a1764c77e diff --git a/tests/external-data/blue.jpg.sha512 b/tests/external-data/blue.jpg.sha512 deleted file mode 100644 index bed718281d..0000000000 --- a/tests/external-data/blue.jpg.sha512 +++ /dev/null @@ -1 +0,0 @@ -867b1f3c568289efc7d0dba97d827a2bc4d83a7465cebcb3b5aec7bac6a38cf70d037d1814402bc97ad1f2f6737cfb5ce97db0a4fb53a716e77fd3ba57a7ab3b diff --git a/tests/external-data/cities.csv.sha512 b/tests/external-data/cities.csv.sha512 deleted file mode 100644 index d28b11f729..0000000000 --- a/tests/external-data/cities.csv.sha512 +++ /dev/null @@ -1 +0,0 @@ -5a665e5feda24f28e5cf4ed0801b67e73bbcf3ea781b2e50d11284214e67b25b68e6a1c48da46e5e4d4d0c54c2ec18f88d292224b4541fb279396cf7b94beac9 diff --git a/tests/external-data/earthquakes-video.webm.sha512 b/tests/external-data/earthquakes-video.webm.sha512 deleted file mode 100644 index e82e4f4ef4..0000000000 --- a/tests/external-data/earthquakes-video.webm.sha512 +++ /dev/null @@ -1 +0,0 @@ -834a9d05f5fb00145e529fa4b217398db0df188c69d751f3869128b6e9c92d3000f85378752c56d9d9b5fa0870437dd9bdfeb5d62f6c87c2c03a7f1a20ee8523 diff --git a/tests/external-data/earthquakes.json.sha512 b/tests/external-data/earthquakes.json.sha512 deleted file mode 100644 index 1445b31595..0000000000 --- a/tests/external-data/earthquakes.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -f098b6437411384b552419b4a36264c1bb3fed816ccfe9545145175e0b92a0b7ad5ebdcb9dddd0a12a90499143ffa471c02f6e049be5b973db607ff066892500 diff --git a/tests/external-data/grid.jpg.sha512 b/tests/external-data/grid.jpg.sha512 deleted file mode 100644 index 18d0de61d7..0000000000 --- a/tests/external-data/grid.jpg.sha512 +++ /dev/null @@ -1 +0,0 @@ -60d201a14c7d31e7881301e6784e0372ddf27f26e5e4eafba9ba39158dfd050e3683faaa660fcde47e6c994dd3ee64c5a5231474ca75090053ef9207fedd9029 diff --git a/tests/external-data/hurricanes.json.sha512 b/tests/external-data/hurricanes.json.sha512 deleted file mode 100644 index ad01826d5a..0000000000 --- a/tests/external-data/hurricanes.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -012f15036bfc9ac3abb81a2a61e2c7b602ef3d8f7bd3b3f0fb66972ee69034730655f69febb2df5d657f22cd2b1e69170f6568bcbb03d7ebdecdfbafb80cc3b7 diff --git a/tests/external-data/land_polygons.json.sha512 b/tests/external-data/land_polygons.json.sha512 deleted file mode 
100644 index d326bce343..0000000000 --- a/tests/external-data/land_polygons.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -30a828392d58678599130e0dca6d7f27e7e07e4e5b5d7f7a37871eb395d53d97b76134c0a07e805fbdfac0f42e6d3ca6e287c9727815a9dc711d541b1c8f68a1 diff --git a/tests/external-data/land_shallow_topo_2048.png.sha512 b/tests/external-data/land_shallow_topo_2048.png.sha512 deleted file mode 100644 index 0ff7e03d11..0000000000 --- a/tests/external-data/land_shallow_topo_2048.png.sha512 +++ /dev/null @@ -1 +0,0 @@ -8a8330dba5bacdb511038ad0f6ee5a764a40aa7a8868a445749f653ae5d85d8317684ac706e7a9f049590170df6bc3fefc2912d52124d1b3b17aa43c529ff2a8 diff --git a/tests/external-data/noaa_prcp.json.sha512 b/tests/external-data/noaa_prcp.json.sha512 deleted file mode 100644 index d0f4784051..0000000000 --- a/tests/external-data/noaa_prcp.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -07b4e12f0a31c0f48ca42545e61324941be7df24bd521541250969dd3f14f4400a362601ea9ecb4220d9d3b731f01d75cf9a998682c43afbc63cc4a16c2cba2e diff --git a/tests/external-data/oahu-dense.json.sha512 b/tests/external-data/oahu-dense.json.sha512 deleted file mode 100644 index 7000b03856..0000000000 --- a/tests/external-data/oahu-dense.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -692a44ec4a18b16d1530403a9a2bf286ff2ef7d45fa58f555c278f91a0fa708b5626a3f38955e06a11cdc06a1009e859328687c1f32a2169e0c8ef1b518418c1 diff --git a/tests/external-data/oahu-medium.json.sha512 b/tests/external-data/oahu-medium.json.sha512 deleted file mode 100644 index 9b710e2ef7..0000000000 --- a/tests/external-data/oahu-medium.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -83375c5c2678b11d8de7f59fd8e2f9b889d3ec20f4a81279d996711bfc0942bd9dcdce149cbca995930459c26b3bf58be60569687085adf361ff805436b75aab diff --git a/tests/external-data/oahu.json.sha512 b/tests/external-data/oahu.json.sha512 deleted file mode 100644 index 0f433a70fd..0000000000 --- a/tests/external-data/oahu.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -e44282c44fa95f0b40c2135ee94ff40a755771f3b4bf9acda0eef0048cd0fb29b3a71352e2eecdca6cfc35fafde96ffb1a9658c9fee2346071808a9123c26cb5 diff --git a/tests/external-data/red.jpg.sha512 b/tests/external-data/red.jpg.sha512 deleted file mode 100644 index 0473d8afa1..0000000000 --- a/tests/external-data/red.jpg.sha512 +++ /dev/null @@ -1 +0,0 @@ -70884f5c3d1747633412dcf64d4886d985b49f508b68eb9f3874b2fdcfad38e7623f4a7c7355baa1656503f46e2e091576a19ded9fad7c7c65387c313206d2e6 diff --git a/tests/external-data/roads.json.sha512 b/tests/external-data/roads.json.sha512 deleted file mode 100644 index 2786b0419c..0000000000 --- a/tests/external-data/roads.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -ae8d8b99c3fab73798ddcc246ea53b1bc8d598414d00df4ce697373a343fbf7ffed4dbae5f07a997ac7731b5f86140686b8425b99bd5818b8c9ec68e7c4d3315 diff --git a/tests/external-data/sample.json.sha512 b/tests/external-data/sample.json.sha512 deleted file mode 100644 index 698a9e739c..0000000000 --- a/tests/external-data/sample.json.sha512 +++ /dev/null @@ -1 +0,0 @@ -5d4b00559f17fac607eafe4a9bb933386ebd572fbe545e6138b8eafbbf510074a8ae398c7df0420a017218af83b18cc322633b77007b63a27dfea7a50db70244 diff --git a/tests/external-data/temperature_data.tsv.sha512 b/tests/external-data/temperature_data.tsv.sha512 deleted file mode 100644 index a506f1f17f..0000000000 --- a/tests/external-data/temperature_data.tsv.sha512 +++ /dev/null @@ -1 +0,0 @@ -bde5cdad7c4518694393de83da938bcf5d485b41116b51f0fce4b462737992d2973f076c577040fa816873b196c6397244aa03736fc7750a0eea0ba5cbcc48e4 diff --git a/tests/external-data/tilefancy.png.sha512 
b/tests/external-data/tilefancy.png.sha512 deleted file mode 100644 index dc074aa7e7..0000000000 --- a/tests/external-data/tilefancy.png.sha512 +++ /dev/null @@ -1 +0,0 @@ -455effa59d421cbb73c6def815813063f1c649363de4007fa0de00cc2e0f24cba745c046266e5a83fc43b121a648017d509d7bf03c30fbee1448817e3f849683 diff --git a/tests/external-data/tiles.tgz.sha512 b/tests/external-data/tiles.tgz.sha512 deleted file mode 100644 index bcfbe60782..0000000000 --- a/tests/external-data/tiles.tgz.sha512 +++ /dev/null @@ -1 +0,0 @@ -b2e8af36ffa81bab566f89f9f386d4175f43619972d288c10588f9af5d20080caaed1bf44f7cd51e410850242add0101ef3f5f053b84f82771978266348136a6 diff --git a/tests/external-data/white.jpg.sha512 b/tests/external-data/white.jpg.sha512 deleted file mode 100644 index cb660aecc0..0000000000 --- a/tests/external-data/white.jpg.sha512 +++ /dev/null @@ -1 +0,0 @@ -ea7a9d7ff76775e742572f89e90cce1248ec99c33b2f486e0fa1d19ab461b87dff324533ecb186a3db14e40a3826da97b5d66566360a201228f60140b0e89942 diff --git a/tests/notes.js b/tests/notes.js deleted file mode 100755 index cb409d2af4..0000000000 --- a/tests/notes.js +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env node -/** - * A node CLI for handling build notes files. - * - * Accepts commands - */ -var fs = require('fs'); -var path = require('path'); -var script = process.argv[1]; -var args = process.argv.slice(2); -var command = args[0]; -var notes_path = args[1] || path.resolve('notes'); -var output_file = args[2] || path.resolve('build_notes.json'); - -function print_help() { - console.error('Usage: ' + script + ' reset|report|combine '); -} - -function reset() { - if (fs.existsSync(output_file)) { - fs.unlinkSync(output_file); - } - if (fs.existsSync(notes_path)) { - fs.readdirSync(notes_path).forEach(function (f) { - fs.unlinkSync(path.resolve(notes_path, f)); - }); - fs.rmdirSync(notes_path); - } - fs.mkdirSync(notes_path); -} - -function combine() { - var notes = {}; - if (!fs.existsSync(notes_path)) { - return {}; - } - fs.readdirSync(notes_path).forEach(function (f) { - var content = JSON.parse(fs.readFileSync(path.resolve(notes_path, f))); - notes[f.replace(/\.json$/, '')] = content; - }); - return notes; -} - -if (command === 'report') { - console.log(JSON.stringify(combine(), null, 2)); -} else if (command === 'combine') { - fs.writeFileSync(output_file, JSON.stringify(combine(), null, 2)); -} else if (command === 'reset') { - reset(); -} else if (command === 'help') { - print_help(); -} else { - console.error('Invalid arguments provided'); - print_help(); - process.exit(1); -} diff --git a/tests/runners/baseline_images.py b/tests/runners/baseline_images.py index 5a50c3ed12..e7a2383726 100755 --- a/tests/runners/baseline_images.py +++ b/tests/runners/baseline_images.py @@ -10,10 +10,12 @@ def generate_baselines(args): """ - Generate baseline image tarball by running ctest in the build directory. + Generate baseline image tarball. :param args: a dictionary of arguments, including: - xvfb: if True, run ctest within xvfb-run. + xvfb: if True, run ci-xvfb instead of ci. + make: if "existing", use existing images to create the tarball rather + than running ci or ci-xvfb. build: the build directory; created if it does not exist. verbose: the verbosity level. 
""" @@ -25,30 +27,36 @@ def generate_baselines(args): os.makedirs(buildPath) if not os.path.isdir(buildPath): raise Exception('build path is not a directory') - os.chdir(buildPath) - if not os.path.exists(os.path.join(buildPath, 'CMakeFiles')): - cmd = ['cmake', cwd] - if args['verbose'] >= 1: - print('Running cmake: %s' % subprocess.list2cmdline(cmd)) - subprocess.check_call(cmd) - tarPath = os.path.join(buildPath, 'base-images.tgz') + tarName = 'base-images.tgz' + tarPath = os.path.join(buildPath, tarName) if os.path.exists(tarPath): os.unlink(tarPath) - cmd = ['ctest', '-C', 'baseline_images', '-R', 'baseline_images', - '--output-on-failure'] - if args.get('xvfb'): - cmd = ['xvfb-run', '-s', '-ac -screen 0 1280x1024x24', 'bash', '-c', - subprocess.list2cmdline(cmd)] + if args['make'] != 'existing': + cmd = ['npm', 'run', 'ci-xvfb' if args.get('xvfb') else 'ci'] + if args['verbose'] >= 1: + print('Generating baselines: %s' % subprocess.list2cmdline(cmd)) + subprocess.check_call(cmd) + os.chdir(buildPath) + cmd = ['bash', '-c', + "find images -name '*.png' -a -not -name '*-test.png' -a -not " + "-name '*-diff.png' -a -not -name '*-base.png' -a -not " + "-name '*-screen.png' -print0 | xargs -0 -n 1 -P 8 optipng || true"] if args['verbose'] >= 1: - print('Running ctest: %s' % subprocess.list2cmdline(cmd)) + print('Optimizing output: %s' % subprocess.list2cmdline(cmd)) + subprocess.check_call(cmd) + cmd = ['tar', '-zcvf', tarPath, '--exclude=*-test.png', + '--exclude=*-diff.png', '--exclude=*-base.png', + '--exclude=*-screen.png', '-C', 'images', '.'] + if args['verbose'] >= 1: + print('Making tar file: %s' % subprocess.list2cmdline(cmd)) subprocess.check_call(cmd) - os.chdir(cwd) tarSize = os.path.getsize(tarPath) if args['verbose'] >= 1: print('Created baseline image tgz file, %d bytes' % tarSize) + os.chdir(cwd) if args.get('copy'): name = 'Baseline Images %s.tgz' % time.strftime( - '%Y-%m-%d %H:%M:%S', time.localtime(os.path.getmtime(tarPath))) + '%Y-%m-%d %H-%M-%S', time.localtime(os.path.getmtime(tarPath))) copiedTarPath = os.path.join(buildPath, name) shutil.copy2(tarPath, copiedTarPath) if args['verbose'] >= 1: @@ -88,11 +96,15 @@ def upload_baselines(args): stream=open(tarPath, 'rb'), size=tarSize, mimeType='application/tar+gzip') if args['verbose'] >= 1: print('Upload to file %s' % uploadedFile['_id']) - testDataPath = os.path.abspath('tests/external-data') - if not os.path.isdir(testDataPath): + testDataPath = os.path.abspath('scripts/datastore.js') + if not os.path.isfile(testDataPath): raise Exception('Cannot update test-data information.') sha512 = gc.getFile(uploadedFile['_id'])['sha512'] - open(os.path.join(testDataPath, 'base-images.tgz.sha512'), 'w').write(sha512) + ds = open(testDataPath).read() + start, rest = ds.split("'base-images.tgz': ", 1) + rest, end = rest.split(',', 1) + ds = start + ("'base-images.tgz': '%s'," % sha512) + end + open(testDataPath, 'w').write(ds) if args['verbose'] >= 1: print('test-data references updated') @@ -104,18 +116,20 @@ def upload_baselines(args): 'of the geojs repository.') parser.add_argument( '--xvfb', '-x', dest='xvfb', action='store_true', - help='Run xvfb-run when running ctest to generate the baseline ' - 'images. Only applies if the images are generated.') + help='Run xvfb-run when generating the baseline images. 
Only applies ' 'if the images are generated.') parser.add_argument( '--no-xvfb', dest='xvfb', action='store_false', help='Do not use xvfb-run when generating baseline images.') parser.add_argument( '--generate', '-g', dest='make', action='store_true', - help='Generate baseline images by running "ctest -C baseline_images ' - '-R baseline_images".') + help='Generate baseline images by running "npm run ci".') parser.add_argument( '--no-generate', dest='make', action='store_false', help='Do not generate baseline images.') + parser.add_argument( + '--existing-images', '-e', dest='make', action='store_const', + const='existing', help='Create a baseline file from existing images.') parser.add_argument( '--build', '-b', default='.', help='The build directory. This is created if baseline images are ' diff --git a/tests/test-utils.js b/tests/test-utils.js index c7bd09b266..0d1935848f 100644 --- a/tests/test-utils.js +++ b/tests/test-utils.js @@ -1,12 +1,11 @@ /* These are functions we want available to jasmine tests. */ -/* exported waitForIt, closeToArray, closeToEqual, logCanvas2D, submitNote */ +/* exported waitForIt, closeToArray, closeToEqual, logCanvas2D */ /* global CanvasRenderingContext2D */ var $ = require('jquery'); require('../src/polyfills'); var geo = require('../src'); -var bowser = require('bowser'); module.exports = {}; @@ -129,23 +128,6 @@ module.exports.logCanvas2D = function logCanvas2D(enable) { window._canvasLog = log; }; -/** - * Send data to be reported as part of the a build note. - * - * @param key: the key that this will be reported under. This should be the - * name of the test. - * @param note: the data to send. This will be converted to JSON. - */ -module.exports.submitNote = function submitNote(key, note) { - note.browser = bowser; - return $.ajax({ - url: '/notes?key=' + encodeURIComponent(key), - data: JSON.stringify(note), - method: 'PUT', - contentType: 'application/json' - }); -}; - var origRequestAnimationFrame = window.requestAnimationFrame, origCancelAnimationFrame = window.cancelAnimationFrame, animFrameCallbacks = [];
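A final note on the registry workflow: upload_baselines() now patches the 'base-images.tgz' hash directly into scripts/datastore.js rather than writing a tests/external-data/*.sha512 file. The sha512 value for any other new data file can be computed the same way checksumFile() in that script does; the file path below is only a placeholder:

// Sketch: print the registry line for a new data file, mirroring
// checksumFile() in scripts/datastore.js.
var crypto = require('crypto');
var fs = require('fs');

function sha512(filepath) {
  return new Promise(function (resolve, reject) {
    var hash = crypto.createHash('sha512');
    fs.createReadStream(filepath)
      .on('error', reject)
      .on('data', function (chunk) { hash.update(chunk); })
      .on('end', function () { resolve(hash.digest('hex')); });
  });
}

// Hypothetical file name; replace with the file that was uploaded.
sha512('tests/data/new-file.json').then(function (digest) {
  console.log("'new-file.json': '" + digest + "',");
});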