Skip to content

Commit 5237390

Browse files
authored
update master (#285)
* hashes of downloaded models added * fix bugs in diplomas * change features for diploma classifier * Remove unused API parameters * Move all remaining readers from docreader (#273) * TLDR-340 renamed pdf folder; some refactoring (#275) * Training scripts transition (#274) * TLDR-350 pypi pipeline fix (#277) * TLDR-322 fix ispras_tbl_extr.jar (#279) * moved benchmarks from docreader (#280) * TLDR-336 dedoc api documentation (#281) * TLDR-372 docx bug for documents with comments (#282) * TLDR-359 push to dockerhub automatically (#283) * new version 0.9 (#284)
1 parent 3603e75 commit 5237390

File tree

385 files changed

+34113
-717
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

385 files changed

+34113
-717
lines changed

.github/check_version.py

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
import argparse
2+
import re
3+
from typing import Pattern
4+
5+
6+
def _version_key(version: str) -> tuple:
    """
    Build a sort key for a version string such as "0.9", "1.2.3" or "1.2.3rc4".

    The key is (numeric release parts, is_final_release, rc_number) so that
    releases compare numerically ("0.10" > "0.9") and a final release is
    greater than any of its release candidates ("1.2.3" > "1.2.3rc5").
    """
    base, rc_sep, rc_number = version.partition("rc")
    release_parts = tuple(int(part) for part in base.split("."))
    return release_parts, 0 if rc_sep else 1, int(rc_number) if rc_number else 0


def is_correct_version(version: str, tag: str, old_version: str, regexp: Pattern) -> bool:
    """
    Check that a new release version is well-formed and newer than the previous one.

    :param version: new version string, e.g. "0.9" or "0.9.1rc2"
    :param tag: release tag, expected to equal the version prefixed with "v"
    :param old_version: previously released version string
    :param regexp: pattern the new version must match
    :return: True if the version matches the pattern, agrees with the tag
        and is strictly greater than the old version
    """
    match = regexp.match(version)

    if match is None:
        print("New version doesn't match the pattern")  # noqa
        return False

    if not (tag.startswith("v") and tag[1:] == version):
        print("Tag value should be equal to version with `v` in the beginning")  # noqa
        return False

    # Compare numerically: a plain string comparison would wrongly
    # report e.g. "0.10" < "0.9".
    return _version_key(old_version) < _version_key(version)
18+
19+
20+
if __name__ == "__main__":
    # CI entry point: validate the new release version against the branch,
    # the git tag and the pre-release flag; exit nonzero on any violation.
    parser = argparse.ArgumentParser()
    parser.add_argument("--branch", help="Git branch to check its version", choices=["develop", "master"])
    parser.add_argument("--tag", help="Tag of the release", type=str)
    parser.add_argument("--pre_release", help="Whether the release is a pre-release", choices=["true", "false"])
    parser.add_argument("--new_version", help="New release version", type=str)
    parser.add_argument("--old_version", help="Previous release version", type=str)
    args = parser.parse_args()

    print(f"Old version: {args.old_version}, new version: {args.new_version}, "
          f"branch: {args.branch}, tag: {args.tag}, pre_release: {args.pre_release}")  # noqa

    # master releases look like "1.2" or "1.2.3"; develop pre-releases like "1.2.3rc4"
    master_version_pattern = re.compile(r"^\d+\.\d+(\.\d+)?$")
    develop_version_pattern = re.compile(r"^\d+\.\d+\.\d+rc\d+$")

    correct = False
    if args.branch == "develop":
        correct = is_correct_version(args.new_version, args.tag, args.old_version, develop_version_pattern)

        if correct and master_version_pattern.match(args.old_version):
            # Compare the numeric base of the rc-version with the old release:
            # string comparison would wrongly treat "0.10" as less than "0.9".
            new_base = tuple(int(part) for part in args.new_version.split("rc")[0].split("."))
            old_base = tuple(int(part) for part in args.old_version.split("."))
            if new_base <= old_base:
                correct = False
                print("New version should add 'rc' to the bigger version than the old one")  # noqa
        if correct and int(args.new_version.split("rc")[1]) == 0:
            correct = False
            print("Numeration for 'rc' should start from 1")  # noqa

        if args.pre_release == "false":
            correct = False
            print("Only pre-releases allowed on develop")  # noqa

    if args.branch == "master":
        correct = is_correct_version(args.new_version, args.tag, args.old_version, master_version_pattern)

        if args.pre_release == "true":
            correct = False
            print("Pre-releases are not allowed on master")  # noqa

    # Explicit nonzero exit instead of `assert`, which is stripped under -O.
    if not correct:
        raise SystemExit("Version is incorrect")
    print("Version is correct")  # noqa

.github/workflows/docs.yaml

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,14 @@ jobs:
1919

2020
- name: Install dependencies
2121
run: |
22+
sudo apt-get install -y libreoffice
2223
python -m pip install --upgrade --no-cache-dir pip setuptools
2324
python -m pip install --exists-action=w --no-cache-dir -r requirements.txt
2425
python -m pip install --upgrade --upgrade-strategy eager --no-cache-dir .[torch,docs]
2526
2627
- name: Build documentation
2728
# Build the documentation, you can use this command locally
28-
run: python -m sphinx -T -E -W -b html -d docs/_build/doctrees -D language=en docs/source docs/_build
29+
run: |
30+
python -m sphinx -T -E -W -b html -d docs/_build/doctrees -D language=en docs/source docs/_build
31+
cd docs/source/_static/code_examples
32+
python dedoc_usage_tutorial.py

.github/workflows/release.yaml

Lines changed: 24 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,35 +1,48 @@
11
name: Publish to PyPI
22

33
on:
4-
push:
5-
branches:
6-
- develop
7-
- master
8-
paths:
9-
- VERSION # publish only when version has been changed
4+
release:
5+
types: [published]
106

117
jobs:
128
# Publish the package to PyPI https://pypi.org
139
pypi-publish:
1410
runs-on: ubuntu-latest
1511
steps:
1612
- name: Checkout repo
17-
uses: actions/checkout@v2
13+
uses: actions/checkout@v1
14+
1815
- name: Set up Python ${{ matrix.python-version }}
1916
uses: actions/setup-python@v2
2017
with:
2118
python-version: '3.9'
2219

20+
# - name: Check version correctness
21+
# run: |
22+
# python3 .github/check_version.py --branch ${{ github.event.release.target_commitish }} --tag $GITHUB_REF_NAME \
23+
# --new_version $(< VERSION) --old_version $(git cat-file -p $(git rev-parse "$GITHUB_SHA"^1):VERSION) \
24+
# --pre_release ${{ github.event.release.prerelease }}
25+
2326
- name: Install dependencies
2427
run: |
2528
python3 -m pip install --upgrade pip
2629
pip3 install build twine
2730
28-
- name: Build and publish to PyPI # TODO change to pypi instead of test pypi
31+
- name: Build and publish to PyPI
32+
if: ${{ success() }} # publish only when version passed the checks
2933
env:
30-
TWINE_USERNAME: ${{ secrets.TEST_PYPI_USERNAME }} # TODO delete TEST_ in the name of the variable
31-
TWINE_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD }} # TODO delete TEST_ in the name of the variable
34+
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
35+
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
3236
run: |
3337
python3 -m build -w
3438
twine check dist/*
35-
twine upload --repository testpypi dist/*
39+
twine upload --repository pypi dist/*
40+
41+
- name: Push to dockerhub
42+
if: ${{ success() }}
43+
run: |
44+
docker build -f docker/Dockerfile -t dedocproject/dedoc:$GITHUB_REF_NAME .
45+
docker login -u ${{ secrets.DOCKERHUB_USERNAME }} -p ${{ secrets.DOCKERHUB_PASSWORD }}
46+
docker tag dedocproject/dedoc:$GITHUB_REF_NAME dedocproject/dedoc:latest
47+
docker push dedocproject/dedoc:$GITHUB_REF_NAME
48+
docker push dedocproject/dedoc:latest

MANIFEST.in

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
include dedoc/api/static/*/*
2-
include dedoc/readers/scanned_reader/pdftxtlayer_reader/tabbypdf/jars/*
2+
include dedoc/readers/pdf_reader/pdf_txtlayer_reader/tabbypdf/jars/*
33
include docs/*

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ docker-compose up --build
5252

5353
Start Dedoc with tests:
5454
```bash
55-
tests="true" docker-compose up --build
55+
test="true" docker-compose up --build
5656
```
5757

5858
Now you can go to the localhost:1231 and look at the docs and examples.

VERSION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
2023.05.26
1+
0.9

dedoc/api/api_args.py

Lines changed: 62 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -5,86 +5,96 @@
55

66

77
class QueryParameters(BaseModel):
8-
language: Optional[str]
8+
document_type: Optional[str]
9+
structure_type: Optional[str]
10+
return_format: Optional[str]
11+
912
with_attachments: Optional[str]
13+
need_content_analysis: Optional[str]
14+
recursion_deep_attachments: Optional[str]
15+
return_base64: Optional[str]
16+
1017
insert_table: Optional[str]
11-
return_format: Optional[str]
12-
structure_type: Optional[str]
13-
delimiter: Optional[str]
14-
encoding: Optional[str]
15-
document_type: Optional[str]
16-
pdf_with_text_layer: Optional[str]
17-
pages: Optional[str]
18+
need_pdf_table_analysis: Optional[str]
19+
table_type: Optional[str]
1820
orient_analysis_cells: Optional[str]
1921
orient_cell_angle: Optional[str]
22+
23+
pdf_with_text_layer: Optional[str]
24+
language: Optional[str]
25+
pages: Optional[str]
2026
is_one_column_document: Optional[str]
2127
document_orientation: Optional[str]
22-
html_fields: Optional[str]
23-
cloud_bucket: Optional[str]
2428
need_header_footer_analysis: Optional[str]
2529
need_binarization: Optional[str]
26-
need_pdf_table_analysis: Optional[str]
30+
31+
delimiter: Optional[str]
32+
encoding: Optional[str]
33+
html_fields: Optional[str]
2734
handle_invisible_table: Optional[str]
28-
return_base64: Optional[str]
29-
archive_as_single_file: Optional[str]
30-
upload_attachments_into_cloud: Optional[str]
31-
need_content_analysis: Optional[str]
32-
recursion_deep_attachments: Optional[str]
33-
table_type: Optional[str]
3435

3536
def __init__(self,
36-
language: Optional[str] = Body(description="a recognition language. Default: 'rus+eng'", enum=["rus+eng", "rus", "eng"], default=None),
37+
# type of document structure parsing
38+
document_type: Optional[str] = Body(description="a document type. Default: ''", enum=["", "law", "tz", "diploma"], default=None),
39+
structure_type: Optional[str] = Body(description="output structure type (linear or tree). Default: 'tree'", enum=["linear", "tree"], default=None),
40+
return_format: Optional[str] = Body(description="an option for returning a response in html form, json, pretty_json or tree. Assume that one should use json in all cases, all other formats are used for debug porpoises only. Default: 'json'", default=None),
41+
42+
# attachments handling
3743
with_attachments: Optional[str] = Body(description="an option to enable the analysis of attached files. Default: 'false'", default=None),
44+
need_content_analysis: Optional[str] = Body(description="turn on if you need parse the contents of the document attachments. Default: 'false'", default=None),
45+
recursion_deep_attachments: Optional[str] = Body(description="the depth on which nested attachments will be parsed if need_content_analysis=true. Default: '10'", default=None),
46+
return_base64: Optional[str] = Body(description="returns images in base64 format. Default: 'false'", default=None),
47+
48+
# tables handling
3849
insert_table: Optional[str] = Body(description="Insert table into the result tree's content or not. Default: 'false'", default=None),
39-
return_format: Optional[str] = Body(description="an option for returning a response in html form, json, pretty_json or tree. Assume that one should use json in all cases, all other formats are used for debug porpoises only. Default: 'json'", default=None),
40-
structure_type: Optional[str] = Body(description="output structure type (linear or tree). Default: 'tree'", enum=["linear", "tree"], default=None),
41-
delimiter: Optional[str] = Body(description="a column separator for csv-files", default=None),
42-
encoding: Optional[str] = Body(description="a document encoding", default=None),
43-
document_type: Optional[str] = Body(description="a document type. Default: ''", enum=["", "law", "tz", "diploma", "article", "slide"], default=None),
44-
pdf_with_text_layer: Optional[str] = Body(description="an option to extract text from a text layer to PDF or using OCR methods for image-documents. Default: 'auto_tabby'", enum=["true", "false", "auto", "auto_tabby", "tabby"], default=None),
45-
pages: Optional[str] = Body(description="an option to limit page numbers in pdf, archives with images. left:right, read pages from left to right. Default: ':'", default=None),
50+
need_pdf_table_analysis: Optional[str] = Body(description="include a table analysis into pdfs. Default: 'true'", default=None),
51+
table_type: Optional[str] = Body(description="a pipeline mode for a table recognition. Default: ''", default=None),
4652
orient_analysis_cells: Optional[str] = Body(description="a table recognition option enables analysis of rotated cells in table headers. Default: 'false'", default=None),
4753
orient_cell_angle: Optional[str] = Body(description="an option to set orientation of cells in table headers. \"270\" - cells are rotated 90 degrees clockwise, \"90\" - cells are rotated 90 degrees counterclockwise (or 270 clockwise)", default=None),
54+
55+
# pdf handling
56+
pdf_with_text_layer: Optional[str] = Body(description="an option to extract text from a text layer to PDF or using OCR methods for image-documents. Default: 'auto_tabby'", enum=["true", "false", "auto", "auto_tabby", "tabby"], default=None),
57+
language: Optional[str] = Body(description="a recognition language. Default: 'rus+eng'", enum=["rus+eng", "rus", "eng"], default=None),
58+
pages: Optional[str] = Body(description="an option to limit page numbers in pdf, archives with images. left:right, read pages from left to right. Default: ':'", default=None),
4859
is_one_column_document: Optional[str] = Body(description="an option to set one or multiple column document. \"auto\" - system predict number of columns in document pages, \"true\" - is one column documents, \"false\" - is multiple column documents. Default: 'auto'", default=None),
49-
document_orientation: Optional[str] = Body(description="an option to set vertical orientation of the document without using an orientation classifier \"auto\" - system predict angle (0, 90, 180, 370) and rotate document, \"no_change\" - do not predict orientation. Default: 'auto'", enum=["auto", "no_change"], default=None),
50-
html_fields: Optional[str] = Body(description="a list of fields for JSON documents to be parsed as HTML documents. It is written as a json string of a list, where each list item is a list of keys to get the field. Default: ''", default=None),
51-
cloud_bucket: Optional[str] = Body(description="a path (bucket) in the cloud storage mime. Default: ''", default=None),
60+
document_orientation: Optional[str] = Body(description="an option to set vertical orientation of the document without using an orientation classifier \"auto\" - system predict angle (0, 90, 180, 270) and rotate document, \"no_change\" - do not predict orientation. Default: 'auto'", enum=["auto", "no_change"], default=None),
5261
need_header_footer_analysis: Optional[str] = Body(description="include header-footer analysis into pdf with text layer. Default: 'false'", default=None),
5362
need_binarization: Optional[str] = Body(description="include an adaptive binarization into pdf without a text layer. Default: 'false'", default=None),
54-
need_pdf_table_analysis: Optional[str] = Body(description="include a table analysis into pdfs. Default: 'true'", default=None),
63+
64+
# other formats handling
65+
delimiter: Optional[str] = Body(description="a column separator for csv-files", default=None),
66+
encoding: Optional[str] = Body(description="a document encoding", default=None),
67+
html_fields: Optional[str] = Body(description="a list of fields for JSON documents to be parsed as HTML documents. It is written as a json string of a list, where each list item is a list of keys to get the field. Default: ''", default=None),
5568
handle_invisible_table: Optional[str] = Body(description="handle table without visible borders as tables in html. Default: 'false'", default=None),
56-
return_base64: Optional[str] = Body(description="returns images in base64 format. Default: 'false'", default=None),
57-
archive_as_single_file: Optional[str] = Body(description="additional parameters for the archive reader. Default: 'true'", default=None),
58-
upload_attachments_into_cloud: Optional[str] = Body(description="turn on if you need upload attachments into a cloud. Turn on if with_attachments=True and \"cloud_bucket\" not empty. Default: 'false'", default=None),
59-
need_content_analysis: Optional[str] = Body(description="turn on if you need parse the contents of the document attachments. Default: 'false'", default=None),
60-
recursion_deep_attachments: Optional[str] = Body(description="the depth on which nested attachments will be parsed if need_content_analysis=true. Default: '10'", default=None),
61-
table_type: Optional[str] = Body(description="a pipeline mode for a table recognition. Default: ''", default=None),
69+
70+
6271
**data: Any) -> None:
6372

6473
super().__init__(**data)
65-
self.language: str = language or "rus+eng"
74+
self.document_type: str = document_type or ""
75+
self.structure_type: str = structure_type or 'tree'
76+
self.return_format: str = return_format or 'json'
77+
6678
self.with_attachments: str = with_attachments or 'false'
79+
self.need_content_analysis: str = need_content_analysis or 'false'
80+
self.recursion_deep_attachments: str = recursion_deep_attachments or '10'
81+
self.return_base64: str = return_base64 or 'false'
82+
6783
self.insert_table: str = insert_table or 'false'
68-
self.return_format: str = return_format or 'json'
69-
self.structure_type: str = structure_type or 'tree'
70-
self.delimiter: str = delimiter
71-
self.encoding: str = encoding
72-
self.document_type: str = document_type or ""
73-
self.pdf_with_text_layer: str = pdf_with_text_layer or 'auto_tabby'
74-
self.pages: str = pages or ':'
84+
self.need_pdf_table_analysis: str = need_pdf_table_analysis or 'true'
85+
self.table_type: str = table_type or ''
7586
self.orient_analysis_cells: str = orient_analysis_cells or 'false'
7687
self.orient_cell_angle: str = orient_cell_angle or "90"
88+
89+
self.pdf_with_text_layer: str = pdf_with_text_layer or 'auto_tabby'
90+
self.language: str = language or "rus+eng"
91+
self.pages: str = pages or ':'
7792
self.is_one_column_document: str = is_one_column_document or 'auto'
7893
self.document_orientation: str = document_orientation or "auto"
79-
self.html_fields: str = html_fields or ''
80-
self.cloud_bucket: str = cloud_bucket or ''
8194
self.need_header_footer_analysis: str = need_header_footer_analysis or 'false'
8295
self.need_binarization: str = need_binarization or 'false'
83-
self.need_pdf_table_analysis: str = need_pdf_table_analysis or 'true'
96+
97+
self.delimiter: str = delimiter
98+
self.encoding: str = encoding
99+
self.html_fields: str = html_fields or ''
84100
self.handle_invisible_table: str = handle_invisible_table or 'false'
85-
self.return_base64: str = return_base64 or 'false'
86-
self.archive_as_single_file: str = archive_as_single_file or 'true'
87-
self.upload_attachments_into_cloud: str = upload_attachments_into_cloud or 'false'
88-
self.need_content_analysis: str = need_content_analysis or 'false'
89-
self.recursion_deep_attachments: str = recursion_deep_attachments or '10'
90-
self.table_type: str = table_type or ''

dedoc/api/dedoc_api.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,27 @@
11
import importlib
22
import os
3+
34
import uvicorn
5+
from fastapi import Response, FastAPI, Request, Depends, UploadFile, File
46
from fastapi.responses import UJSONResponse, ORJSONResponse
7+
from fastapi.staticfiles import StaticFiles
58
from starlette.responses import FileResponse, HTMLResponse, JSONResponse, PlainTextResponse
6-
from fastapi import Response, FastAPI, Request, Depends, UploadFile, File
79

810
from dedoc.api.api_args import QueryParameters
911
from dedoc.api.api_utils import json2html, json2tree, json2collapsed_tree
10-
from dedoc.api.init_api import app, config, static_files_dirs, PORT, static_path
1112
from dedoc.common.exceptions.dedoc_exception import DedocException
1213
from dedoc.common.exceptions.missing_file_exception import MissingFileException
14+
from dedoc.config import get_config
1315
from dedoc.manager.dedoc_thread_manager import DedocThreadedManager
1416

17+
config = get_config()
18+
PORT = config["api_port"]
19+
static_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static/")
20+
static_files_dirs = config.get("static_files_dirs")
21+
22+
app = FastAPI()
23+
app.mount('/static', StaticFiles(directory=config.get("static_path", static_path)), name="static")
24+
1525
module_api_args = importlib.import_module(config['import_path_init_api_args'])
1626
logger = config["logger"]
1727
version_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "VERSION"))

0 commit comments

Comments
 (0)