14 changes: 14 additions & 0 deletions .github/workflows/binder-badge.yml
@@ -0,0 +1,14 @@
name: Binder Badge
on:
pull_request_target:
types: [opened]

jobs:
binder:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: jupyterlab/maintainer-tools/.github/actions/binder-link@v1
with:
github_token: ${{ secrets.github_token }}
13 changes: 7 additions & 6 deletions .github/workflows/tests.yml
@@ -22,7 +22,7 @@ jobs:
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}-${{hashFiles('**/requirements.txt')}}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
- name: Install dependencies (docs)
run: |
sudo apt-get install -y pandoc
python -m pip install --upgrade pip
@@ -49,7 +49,7 @@ jobs:
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}-${{hashFiles('**/requirements.txt')}}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
- name: Install dependencies (js)
run: |
python -m pip install --upgrade pip

@@ -116,12 +116,13 @@ jobs:
key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}-${{hashFiles('**/requirements.txt')}}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
- name: Install dependencies (pytest)
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pip 'setuptools<82'
python -m pip install jupyterlab~=3.0
python -m pip install --upgrade --upgrade-strategy=eager ".[test]"
python -m pip install jupyter_server${{ matrix.jupyter_server-version }}
python -m pip install 'setuptools<82'
- name: Test with pytest (Linux)
if: startsWith(matrix.os, 'ubuntu')
run: |
@@ -213,11 +214,11 @@ jobs:
path: |
${{ github.workspace }}/pw-browsers
key: ${{ runner.os }}-${{ hashFiles('ui-tests/package-lock.json') }}

- name: Install browser
working-directory: ui-tests
run: npx playwright install chromium

- name: Run playwright tests
working-directory: ui-tests
run: |
15 changes: 15 additions & 0 deletions binder/environment.yml
@@ -0,0 +1,15 @@
name: example-environment
channels:
- conda-forge
- nodefaults
dependencies:
- jupyterlab >=4.0.0
- nodejs >=25,<26
- python >=3.14
- yarn >=3,<4
# build
- hatchling >=1.5.0
- hatch-jupyter-builder >=0.3.2
- hatch-nodejs-version
# dependencies
- jupyter_server >=2.0.0
7 changes: 7 additions & 0 deletions binder/jupyter_config.py
@@ -0,0 +1,7 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging

c.ServerApp.log_level = logging.DEBUG

c.ContentsManager.allow_hidden = True
41 changes: 41 additions & 0 deletions binder/postBuild
Member Author:
This seems to do about the same thing, except it uses a fairly complicated Python script instead of setting bash -x, which echoes each command as it runs.

Member:
Yeah, I am not opinionated on the language, whichever does the job.
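
For reference, bash's -x option (enabled below via set -euox pipefail) makes the shell print each command, prefixed with +, before executing it, so the Binder build log shows which step is running without any extra tooling. A minimal illustration, not part of this PR:

set -x
pip check
# the build log then shows the traced command before its output:
# + pip check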

@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# perform a development install of nbdime
#
# On Binder, this will run _after_ the environment has been fully created from
# the environment.yml in this directory.
#
# This script should also run locally on Linux/MacOS:
#
# bash binder/postBuild

set -euox pipefail

# verify the environment is self-consistent before even starting
pip check

# install the labextension
pip install -e .
jupyter labextension develop --overwrite .
jupyter server extension enable nbdime

#
mkdir -p ~/.jupyter/

cp binder/jupyter_config.py ~/.jupyter/

# verify the extension didn't break anything
pip check

# list the extensions
jupyter troubleshoot
jupyter notebook --show-config
jupyter lab --show-config
jupyter server extension list
jupyter labextension list


echo ""
echo "JupyterLab with nbdime is ready to run with:"
echo ""
echo " jupyter lab"
echo ""
31 changes: 23 additions & 8 deletions nbdime/tests/conftest.py
@@ -10,6 +10,7 @@
import glob
import io
import re
import threading
from collections.abc import Sequence
from subprocess import Popen, TimeoutExpired
import sys
@@ -220,18 +221,32 @@ def nbdime_base_url():


@fixture
def app(nbdime_base_url, filespath):
"""This is a fixture used by the pytest-tornado plugin.

It is indirectly called by all tests that use the `gen_test`
test mark.
"""
def web_server(nbdime_base_url, filespath):
"""Start a Tornado web server in a background thread and yield the base URL."""
from tornado import ioloop
from nbdime.webapp.nbdimeserver import init_app
return init_app(

port_holder = {}

def on_port(port):
port_holder['port'] = port

app, server = init_app(
on_port=on_port,
base_url=nbdime_base_url,
port=0,
cwd=filespath,
)[0]
)
loop = ioloop.IOLoop.current()
thread = threading.Thread(target=loop.start, daemon=True)
thread.start()

base_url = 'http://localhost:%d' % port_holder['port']
yield base_url

server.stop()
loop.add_callback(loop.stop)
thread.join(timeout=5)


@fixture
44 changes: 19 additions & 25 deletions nbdime/tests/test_web.py
@@ -7,9 +7,9 @@
import json

import pytest
import requests
from tornado import ioloop
from tornado.httputil import url_concat
from tornado.escape import json_encode, json_decode
import nbformat

import nbdime.webapp.nbdiffweb
@@ -73,26 +73,23 @@ def test_merge_web(filespath, unique_port, reset_log, ioloop_patch):


@pytest.mark.timeout(timeout=WEB_TEST_TIMEOUT)
@pytest.mark.gen_test
def test_fetch_diff(http_client, base_url, nbdime_base_url):
def test_fetch_diff(web_server, nbdime_base_url):
url = url_concat(
base_url + nbdime_base_url + '/diff',
web_server + nbdime_base_url + '/diff',
dict(base=diff_a, remote=diff_b))
response = yield http_client.fetch(url)
assert response.code == 200
response = requests.get(url)
assert response.status_code == 200


@pytest.mark.timeout(timeout=WEB_TEST_TIMEOUT)
@pytest.mark.gen_test
def test_api_diff(http_client, base_url, nbdime_base_url, diff_validator, filespath, auth_header):
def test_api_diff(web_server, nbdime_base_url, diff_validator, filespath, auth_header):
post_data = dict(base=diff_a, remote=diff_b)
body = json_encode(post_data)

url = base_url + nbdime_base_url + '/api/diff'
response = yield http_client.fetch(url, method='POST', headers=auth_header, body=body)
assert response.code == 200
url = web_server + nbdime_base_url + '/api/diff'
response = requests.post(url, json=post_data, headers=auth_header)
assert response.status_code == 200
# Check that response is sane:
data = json_decode(response.body)
data = response.json()
# Check that base is as expected:
expected_base = nbformat.read(os.path.join(filespath, diff_a), as_version=4)
assert json.dumps(data['base'], sort_keys=True) == json.dumps(expected_base, sort_keys=True)
@@ -101,26 +98,23 @@ def test_api_diff(http_client, base_url, nbdime_base_url, diff_validator, filesp


@pytest.mark.timeout(timeout=WEB_TEST_TIMEOUT)
@pytest.mark.gen_test
def test_fetch_merge(http_client, base_url, nbdime_base_url):
def test_fetch_merge(web_server, nbdime_base_url):
url = url_concat(
base_url + nbdime_base_url + '/merge',
web_server + nbdime_base_url + '/merge',
dict(base=merge_a, local=merge_b, remote=merge_c))
response = yield http_client.fetch(url)
assert response.code == 200
response = requests.get(url)
assert response.status_code == 200


@pytest.mark.timeout(timeout=WEB_TEST_TIMEOUT)
@pytest.mark.gen_test
def test_api_merge(http_client, base_url, nbdime_base_url, merge_validator, filespath, auth_header):
def test_api_merge(web_server, nbdime_base_url, merge_validator, filespath, auth_header):
post_data = dict(base=merge_a, local=merge_b, remote=merge_c)
body = json_encode(post_data)

url = base_url + nbdime_base_url + '/api/merge'
response = yield http_client.fetch(url, method='POST', headers=auth_header, body=body)
assert response.code == 200
url = web_server + nbdime_base_url + '/api/merge'
response = requests.post(url, json=post_data, headers=auth_header)
assert response.status_code == 200
# Check that response is sane:
data = json_decode(response.body)
data = response.json()
# Check that base is as expected:
expected_base = nbformat.read(os.path.join(filespath, merge_a), as_version=4)
assert json.dumps(data['base'], sort_keys=True) == json.dumps(expected_base, sort_keys=True)
1 change: 0 additions & 1 deletion pyproject.toml
@@ -71,7 +71,6 @@ test = [
"pytest>=6.0",
"pytest-cov",
"pytest-timeout",
"pytest-tornado",
"jupyter_server[test]",
"jsonschema",
"mock",