Skip to content

Commit d5e172e

Browse files
author
Anirudh Apparaju
committed
Merge remote-tracking branch 'upstream/develop' into develop-2023-07
2 parents ef7783c + aa8d194 commit d5e172e

168 files changed

Lines changed: 17650 additions & 7118 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.github/workflows/python-package.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,4 +89,4 @@ jobs:
8989

9090
DA_GCP_LABELBOX_API_KEY: ${{ secrets[matrix.da-test-key] }}
9191
run: |
92-
tox -e py -- -n 10 -svv --reruns 5 --reruns-delay 10
92+
tox -e py -- -n 10 -svv --reruns 5 --reruns-delay 8

.pre-commit-config.yaml

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
repos:
22
- repo: https://github.com/google/yapf
3-
rev: v0.31.0
3+
rev: v0.40.1
44
hooks:
55
- id: yapf
66
name: "yapf"
77
args: [-i, --style, google]
8-
- repo: https://github.com/datarootsio/databooks
9-
rev: 1.0.1
10-
hooks:
11-
- id: databooks-meta
12-
args: [examples, --overwrite]
8+
- repo: https://github.com/datarootsio/databooks
9+
rev: 1.0.1
10+
hooks:
11+
- id: databooks-meta
12+
args: [examples, --overwrite]

.readthedocs.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,9 @@ version: 2
99
sphinx:
1010
configuration: docs/source/conf.py
1111

12-
# Build all formats (epub, pdf, htmlzip)
13-
formats:
14-
- pdf
12+
# Not building additional formats, as PDF build is failing
13+
# formats:
14+
# - pdf
1515

1616
# Optionally set the version of Python and requirements required to build your docs
1717
python:

CHANGELOG.md

Lines changed: 135 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,137 @@
11
# Changelog
2+
# Version 3.49.1 (2023-06-29)
3+
## Fixed
4+
* Removed numpy version lock that caused Python version >3.8 to download incompatible numpy version
5+
6+
# Version 3.49.0 (2023-06-27)
7+
8+
## Changed
9+
* Improved batch creation logic when more than 1000 global keys provided
10+
11+
## Notebooks
12+
* Added example on how to access mark in export v2
13+
* Removed NDJSON library from `examples/basics/custom_embeddings.ipynb`
14+
* Removed `queue_mode` property from `create_project()` method call.
15+
16+
# Version 3.48.0 (2023-06-13)
17+
## Added
18+
* Support for ISO format to exports V2 date filters
19+
* Support to specify confidence for all free-text annotations
20+
21+
## Changed
22+
* Removed backports library and replaced it with python dateutil package to parse iso strings
23+
24+
## Notebooks
25+
* Added predictions to model run example
26+
* Added notebook to run yolov8 and sam on video and upload to LB
27+
* Updated google colab notebooks to reflect raster segmentation tool being released on 6/13
28+
* Updated radio NDJSON annotations format to support confidence
29+
* Added confidence to all free-text annotations (ndjson)
30+
* Fixed issues with cv2 library rooting from the Geospatial notebook used a png map with a signed URL with an expired token
31+
32+
# Version 3.47.1 (2023-05-24)
33+
## Fixed
34+
* Loading of the ndjson parser when optional [data] libraries (geojson etc.) are not installed
35+
36+
# Version 3.47.0 (2023-05-23)
37+
## Added
38+
* Support for interpolated frames to export v2
39+
40+
## Changed
41+
* Removed ndjson library and replaced it with a custom ndjson parser
42+
43+
## Notebooks
44+
* Removed confidence scores in annotations - video notebook
45+
* Removed raster seg masks from video prediction
46+
* Added export v2 example
47+
* Added SAM and Labelbox connector notebook
48+
49+
# Version 3.46.0 (2023-05-03)
50+
## Added
51+
* Global key support to DataRow Metadata `bulk_upsert()` function
52+
53+
## Notebooks
54+
* Removed dataset based projects from project setup notebook
55+
* Updated all links to annotation import and prediction notebooks in examples README
56+
57+
# Version 3.45.0 (2023-04-27)
58+
## Changed
59+
* Reduce threshold for async batch creation to 1000 data rows
60+
61+
## Notebooks
62+
* Added subclassifications to ontology notebook
63+
* Added conversational and pdf predictions notebooks
64+
65+
# Version 3.44.0 (2023-04-26)
66+
67+
## Added
68+
* `predictions` param for optionally exporting predictions in model run export v2
69+
* Limits on `model_run_ids` and `project_ids` on catalog export v2 params
70+
* `WORKFLOW_ACTION` webhook topic
71+
* Added `data_row_ids` filter for dataset and project export v2
72+
73+
## Fixed
74+
* ISO timestamp parsing for datetime metadata
75+
* Docstring typo for `client.delete_feature_schema_from_ontology()`
76+
77+
## Notebooks
78+
* Removed mention of embeddings metadata fields
79+
* Fixed broken colab link on `examples/extras/classification-confusion-matrix.ipynb`
80+
* Added free text classification example to video annotation import notebook
81+
* Updated prediction_upload notebooks with Annotation Type examples
82+
83+
# Version 3.43.0 (2023-04-05)
84+
85+
## Added
86+
* Nested object classifications to `VideoObjectAnnotation`
87+
* Relationship Annotation Types
88+
* Added `project_ids` and `model_run_ids` to params in all export_v2 functions
89+
90+
## Fixed
91+
* VideoMaskAnnotation annotation import
92+
93+
## Notebooks
94+
* Added DICOM annotation import notebook
95+
* Added audio annotation import notebook
96+
* Added HTML annotation import notebook
97+
* Added relationship examples to annotation import notebooks
98+
* Added global video classification example
99+
* Added nested classification examples
100+
* Added video mask example
101+
* Added global key and LPOs to queue management notebook
102+
103+
# Version 3.42.0 (2023-03-22)
104+
105+
## Added
106+
* Message based classifications with annotation types for conversations
107+
* Video and raster segmentation annotation types
108+
* Global key support to `ConversationEntity`, `DocumentEntity` and `DicomSegments`
109+
* DICOM polyline annotation type
110+
* Confidence attribute to classification annotations
111+
112+
## Changed
113+
* Increased metadata string size limit to 4096 chars
114+
* Removed `deletedDataRowGlobalKey` from `get_data_row_ids_for_global_keys()`
115+
116+
## Fixed
117+
* Annotation data type coercion by Pydantic
118+
* Error message when end point coordinates are smaller than start point coordinates
119+
* Some typos in error messages
120+
121+
## Notebooks
122+
* Refactored video notebook to include annotation types
123+
* Replaced data row ids with global keys in notebooks
124+
* Replaced `create_data_row` with `create_data_rows` in notebooks
125+
126+
# Version 3.41.0 (2023-03-15)
127+
128+
## Added
129+
* New data classes for creating labels: `AudioData`, `ConversationData`, `DicomData`, `DocumentData`, `HTMLData`
130+
* New `DocumentEntity` annotation type class
131+
* New parameter `last_activity_end` to `Project.export_labels()`
132+
133+
## Notebooks
134+
* Updated `annotation_import/pdf.ipynb` with example use of `DocumentEntity` class
2135

3136
# Version 3.40.1 (2023-03-10)
4137

@@ -26,7 +159,7 @@
26159
* Updated `annotation_import/pdf.ipynb` with more examples
27160
* Added `integrations/huggingface/huggingface.ipynb`
28161
* Fixed broken links for detectron notebooks in README
29-
* Added Dataset QueueMode during project creation in `integrations/detectron2/coco_object.ipynb`
162+
* Added Dataset QueueMode during project creation in `integrations/detectron2/coco_object.ipynb`
30163
* Removed metadata and updated ontology in `annotation_import/text.ipynb`
31164
* Removed confidence scores in `annotation_import/image.ipynb`
32165
* Updated custom embedding tutorial links in `basics/data_row_metadata.ipynb`
@@ -71,7 +204,7 @@
71204

72205
# Version 3.37.0 (2023-02-08)
73206
## Added
74-
* New `last_activity_start` param to `project.export_labels()` for filtering which labels are exported. See docstring for more on how this works.
207+
* New `last_activity_start` param to `project.export_labels()` for filtering which labels are exported. See docstring for more on how this works.
75208

76209
## Changed
77210
* Rename `Classification.instructions` to `Classification.name`

CONTRIB.md

Lines changed: 50 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,25 @@
11
# Labelbox Python SDK Contribution Guide
22

3+
## Contribution Guidelines
4+
Thank you for expressing your interest in contributing to the Labelbox SDK.
5+
To ensure that your contribution aligns with our guidelines, please carefully
6+
review the following considerations before proceeding:
7+
8+
* For feature requests, we recommend consulting with Labelbox support or
9+
creating a [Github Issue](https://github.com/Labelbox/labelbox-python/issues) on our repository.
10+
* We can only accept general solutions that address common issues rather than solutions
11+
designed for specific use cases. Acceptable contributions include simple bug fixes and
12+
improvements to functions within the schema/ package.
13+
* Please ensure that any new libraries are compliant with the Apache license that governs the Labelbox SDK.
14+
* Ensure that you update any relevant docstrings and comments within your code
15+
316
## Repository Organization
417

518
The SDK source (excluding tests and support tools) is organized into the
619
following packages/modules:
20+
* `data/` package contains code that maps annotations (labels or pre-labels) to
21+
Python objects, as well as serialization and deserialization tools for converting
22+
between NDJson and Annotation Types.
723
* `orm/` package contains code that supports the general mapping of Labelbox
824
data to Python objects. This includes base classes, attribute (field and
925
relationship) classes, generic GraphQL queries etc.
@@ -25,58 +41,52 @@ following packages/modules:
2541
* Approved PRs are merged to the `develop` branch.
2642
* The `develop` branch is merged to `master` on each release.
2743

28-
## Commits
44+
## Formatting
2945

3046
Before making a commit, to automatically adhere to our formatting standards,
3147
install and activate [pre-commit](https://pre-commit.com/)
48+
```shell
49+
pip install pre-commit
50+
pre-commit install
51+
```
52+
After the above, running `git commit ...` will attempt to fix formatting,
53+
and make necessary changes to files. You will then need to stage those files again.
54+
55+
You may also manually format your code by running the following:
56+
```shell
57+
yapf tests labelbox -i --verbose --recursive --parallel --style "google"
58+
```
3259

33-
After the above, running `git commit ...` will attempt to fix formatting. If
34-
there was formatted needed, you will need to re-add and re-commit before pushing.
3560

3661
## Testing
3762

38-
Currently, the SDK functionality is tested using integration tests. These tests
39-
communicate with a Labelbox server (by default the staging server) and are in
40-
that sense not self-contained. Besides, that they are organized like unit test
41-
and are based on the `pytest` library.
63+
Currently, the SDK functionality is tested using unit and integration tests.
64+
The integration tests communicate with a Labelbox server (by default the staging server)
65+
and are in that sense not self-contained.
66+
67+
Please consult "Testing" section in the README for more details on how to test.
4268

43-
To execute tests you will need to provide an API key for the server you're using
69+
Additionally, to execute tests you will need to provide an API key for the server you're using
4470
for testing (staging by default) in the `LABELBOX_TEST_API_KEY` environment
4571
variable. For more info see [Labelbox API key docs](https://labelbox.helpdocs.io/docs/api/getting-started).
4672

47-
To pass tests, code must be formatted. If pre-commit was not installed,
48-
you will need to use the following command:
49-
50-
```shell
51-
yapf tests labelbox -i --verbose --recursive --parallel --style "google"
52-
```
5373

5474
## Release Steps
5575

56-
Each release should follow the following steps:
57-
58-
1. Update the Python SDK package version in `REPO_ROOT/setup.py`
59-
2. Make sure the `CHANGELOG.md` contains appropriate info
60-
3. Commit these changes and tag the commit in Git as `vX.Y`
61-
4. Merge `develop` to `master` (fast-forward only).
62-
5. Create a GitHub release.
63-
6. This will kick off a Github Actions workflow that will:
64-
- Build the library in the [standard way](https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives)
65-
- Upload the distribution archives in the [standard way](https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives)
66-
- with credentials for the `labelbox` PyPI user.
67-
68-
## Running Jupyter Notebooks
69-
70-
We have plenty of good samples in the _examples_ directory and using them for testing can help us increase our productivity. One way to use jupyter notebooks is to run the jupyter server locally (another way is to use a VSC plugin, not documented here). It works really fast.
71-
72-
Make sure your notebook will use your source code:
73-
1. `ipython profile create`
74-
2. `ipython locate` - will show where the config file is. This is the config file used by the jupyter server, since it runs via ipython
75-
3. Open the file (this should be ipython_config.py and it is usually located in ~/.ipython/profile_default) and add the following line of code:
76-
```
77-
c.InteractiveShellApp.exec_lines = [
78-
'import sys; sys.path.insert(0, "<labelbox-python root folder>")'
79-
]
80-
```
81-
4. Go to the root of your project and run `jupyter notebook` to start the server
76+
Please consult the Labelbox team for releasing your contributions
77+
78+
## Running Jupyter Notebooks
79+
80+
We have plenty of good samples in the _examples_ directory and using them for testing can help us increase our productivity. One way to use jupyter notebooks is to run the jupyter server locally (another way is to use a VSC plugin, not documented here). It works really fast.
81+
82+
Make sure your notebook will use your source code:
83+
1. `ipython profile create`
84+
2. `ipython locate` - will show where the config file is. This is the config file used by the jupyter server, since it runs via ipython
85+
3. Open the file (this should be ipython_config.py and it is usually located in ~/.ipython/profile_default) and add the following line of code:
86+
```
87+
c.InteractiveShellApp.exec_lines = [
88+
'import sys; sys.path.insert(0, "<labelbox-python root folder>")'
89+
]
90+
```
91+
4. Go to the root of your project and run `jupyter notebook` to start the server
8292

Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,8 @@ RUN apt install -y libsm6 \
88
libfontconfig1 \
99
libxrender1 \
1010
libgl1-mesa-glx \
11-
libgeos-dev
11+
libgeos-dev \
12+
gcc
1213

1314
WORKDIR /usr/src/
1415
COPY requirements.txt /usr/src/

Makefile

Lines changed: 20 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,45 +1,51 @@
11

2-
build:
2+
build-image:
33
docker build -t local/labelbox-python:test .
44

5+
test-local: build-image
56

6-
test-local: build
7-
8-
@# if PATH_TO_TEST we asuume you know what you are doing
7+
@# if PATH_TO_TEST we assume you know what you are doing
98
@if [ -z ${PATH_TO_TEST} ]; then \
109
./scripts/ensure_local_setup.sh; \
1110
fi
1211

13-
docker run -it -v ${PWD}:/usr/src -w /usr/src \
12+
docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
1413
-e LABELBOX_TEST_ENVIRON="local" \
1514
-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
1615
-e LABELBOX_TEST_API_KEY_LOCAL=${LABELBOX_TEST_API_KEY_LOCAL} \
1716
local/labelbox-python:test pytest $(PATH_TO_TEST)
1817

19-
test-staging: build
20-
docker run -it -v ${PWD}:/usr/src -w /usr/src \
18+
test-staging: build-image
19+
docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
2120
-e LABELBOX_TEST_ENVIRON="staging" \
2221
-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
2322
-e LABELBOX_TEST_API_KEY_STAGING=${LABELBOX_TEST_API_KEY_STAGING} \
24-
local/labelbox-python:test pytest -n 10 $(PATH_TO_TEST)
23+
local/labelbox-python:test pytest $(PATH_TO_TEST)
24+
25+
test-staging-eu: build-image
26+
docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
27+
-e LABELBOX_TEST_ENVIRON="staging-eu" \
28+
-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
29+
-e LABELBOX_TEST_API_KEY_STAGING_EU=${LABELBOX_TEST_API_KEY_STAGING_EU} \
30+
local/labelbox-python:test pytest $(PATH_TO_TEST)
2531

26-
test-prod: build
27-
docker run -it -v ${PWD}:/usr/src -w /usr/src \
32+
test-prod: build-image
33+
docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
2834
-e LABELBOX_TEST_ENVIRON="prod" \
2935
-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
3036
-e LABELBOX_TEST_API_KEY_PROD=${LABELBOX_TEST_API_KEY_PROD} \
3137
local/labelbox-python:test pytest $(PATH_TO_TEST)
3238

33-
test-onprem: build
34-
docker run -it -v ${PWD}:/usr/src -w /usr/src \
39+
test-onprem: build-image
40+
docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
3541
-e LABELBOX_TEST_ENVIRON="onprem" \
3642
-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
3743
-e LABELBOX_TEST_API_KEY_ONPREM=${LABELBOX_TEST_API_KEY_ONPREM} \
3844
-e LABELBOX_TEST_ONPREM_HOSTNAME=${LABELBOX_TEST_ONPREM_HOSTNAME} \
3945
local/labelbox-python:test pytest $(PATH_TO_TEST)
4046

41-
test-custom: build
42-
docker run -it -v ${PWD}:/usr/src -w /usr/src \
47+
test-custom: build-image
48+
docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
4349
-e LABELBOX_TEST_ENVIRON="custom" \
4450
-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
4551
-e LABELBOX_TEST_API_KEY_CUSTOM=${LABELBOX_TEST_API_KEY_CUSTOM} \

docs/source/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
copyright = '2021, Labelbox'
2222
author = 'Labelbox'
2323

24-
release = '3.40.1'
24+
release = '3.49.1'
2525

2626
# -- General configuration ---------------------------------------------------
2727

0 commit comments

Comments
 (0)