Compare commits
525 Commits
1.0.1-beta
...
duplicateF
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4d02e88905 | ||
|
|
c870ed86e0 | ||
|
|
923f551983 | ||
|
|
97e65f10cf | ||
|
|
36adf91744 | ||
|
|
8b73a87360 | ||
|
|
8c591a8a3b | ||
|
|
c5772c75e5 | ||
|
|
ff02d25eea | ||
|
|
98a7ee35ee | ||
|
|
59d48619b1 | ||
|
|
10056c4229 | ||
|
|
7e772abda7 | ||
|
|
09ea531a90 | ||
|
|
710d9bf6a5 | ||
|
|
bb81f921ff | ||
|
|
1468b1932f | ||
|
|
74d95b6a50 | ||
|
|
d33fb6ef31 | ||
|
|
4201558483 | ||
|
|
983b3d08f6 | ||
|
|
eec715551a | ||
|
|
d3f552173e | ||
|
|
3e3dcb03f9 | ||
|
|
44b0e70399 | ||
|
|
38aedac101 | ||
|
|
9a9d97f3bb | ||
|
|
a4cb8b51a6 | ||
|
|
1bbdebff42 | ||
|
|
783c4e1c5b | ||
|
|
eb5360a38b | ||
|
|
205d337751 | ||
|
|
d469ee82d8 | ||
|
|
c464283962 | ||
|
|
48467b14b5 | ||
|
|
70df9d0682 | ||
|
|
049971a78a | ||
|
|
052e95e53b | ||
|
|
fa0c193730 | ||
|
|
a98eb2f81b | ||
|
|
ae4de0b3e6 | ||
|
|
84b762877f | ||
|
|
2bb7aaeddf | ||
|
|
08434a703e | ||
|
|
552a319298 | ||
|
|
b9e72bf7a1 | ||
|
|
135544c0db | ||
|
|
c297fd7fe7 | ||
|
|
168f24b139 | ||
|
|
89ddea7e9b | ||
|
|
bfe005cb63 | ||
|
|
48c2e91f7e | ||
|
|
02f365b93f | ||
|
|
d78c3e3039 | ||
|
|
f18513fd0e | ||
|
|
caa94c4e28 | ||
|
|
7037877a77 | ||
|
|
6cccf22d54 | ||
|
|
ceb2b2861e | ||
|
|
298f50cb45 | ||
|
|
e616aa8373 | ||
|
|
0fe881df59 | ||
|
|
f3f48ea958 | ||
|
|
9a9d36dc65 | ||
|
|
028b728d82 | ||
|
|
23f323f52d | ||
|
|
49210e67c5 | ||
|
|
e519bf79be | ||
|
|
4f08610a28 | ||
|
|
544bdcb4e3 | ||
|
|
f3095144f5 | ||
|
|
75f31c7cb2 | ||
|
|
f7f4e41c95 | ||
|
|
6da177471b | ||
|
|
8a74e5b02b | ||
|
|
5658f261b0 | ||
|
|
6da3bf764e | ||
|
|
5e06d35057 | ||
|
|
82bcc876b3 | ||
|
|
d7a6882577 | ||
|
|
5e7e1b1513 | ||
|
|
cd9a02c255 | ||
|
|
b47f816dd5 | ||
|
|
d1a649c0ba | ||
|
|
b7759506fe | ||
|
|
97777d61d2 | ||
|
|
e622b56dae | ||
|
|
a24251e5b4 | ||
|
|
d4470a2015 | ||
|
|
d37022b71f | ||
|
|
5f38241bcb | ||
|
|
4fb9461491 | ||
|
|
c9b5bd625f | ||
|
|
558072a330 | ||
|
|
26fa7eeabb | ||
|
|
c50cef568e | ||
|
|
2db80399a6 | ||
|
|
4936c31c18 | ||
|
|
ada88d719f | ||
|
|
1b28623fe3 | ||
|
|
593f568ea7 | ||
|
|
7b4dba35b5 | ||
|
|
c95e700025 | ||
|
|
e10f7dd7a7 | ||
|
|
84dc148cff | ||
|
|
14c9609efe | ||
|
|
2a3620ea21 | ||
|
|
8c5d4869f9 | ||
|
|
c0aa665347 | ||
|
|
6900368251 | ||
|
|
ac1bdf2f9c | ||
|
|
c840724c9c | ||
|
|
220606a046 | ||
|
|
223269cc2e | ||
|
|
31b96fdbb9 | ||
|
|
908a500e7e | ||
|
|
ae20a2eec8 | ||
|
|
287c5f39c1 | ||
|
|
cfd2489228 | ||
|
|
86a83021a6 | ||
|
|
d7595f5ca1 | ||
|
|
5a2bb66d5b | ||
|
|
5de2ce65a4 | ||
|
|
95d167561d | ||
|
|
7d2702c3b6 | ||
|
|
d0f96b6511 | ||
|
|
ba71e61d87 | ||
|
|
191d72554c | ||
|
|
628251c75b | ||
|
|
71499c3d7c | ||
|
|
03b8bf4671 | ||
|
|
773735bf6e | ||
|
|
b62e291749 | ||
|
|
a66b5ea0e3 | ||
|
|
615650f822 | ||
|
|
ed16199940 | ||
|
|
7005bd296e | ||
|
|
cdeca34791 | ||
|
|
aefe778b36 | ||
|
|
c6e1dc87dc | ||
|
|
ef37158e57 | ||
|
|
444e67100c | ||
|
|
82d054fd05 | ||
|
|
f82c024f8d | ||
|
|
da4daa6a8a | ||
|
|
6e1e8959c9 | ||
|
|
aedc5bedb4 | ||
|
|
93f5061c8f | ||
|
|
d46e171bd6 | ||
|
|
e7fe520660 | ||
|
|
91f288e8f4 | ||
|
|
d7bd3bb94b | ||
|
|
9e0b0ac01c | ||
|
|
03a8d906ea | ||
|
|
fff28cf6ae | ||
|
|
9ee95b8d5e | ||
|
|
11bf5a9709 | ||
|
|
af4b3af14e | ||
|
|
9bb7fbbc9e | ||
|
|
beb7c57a6b | ||
|
|
ce48730bd5 | ||
|
|
806b65db24 | ||
|
|
cdf9a40227 | ||
|
|
0adac47968 | ||
|
|
096a89eab4 | ||
|
|
f877d620af | ||
|
|
c175e46b15 | ||
|
|
f0bc669d40 | ||
|
|
db3db48e5c | ||
|
|
cec585f8e0 | ||
|
|
d71a48d8d4 | ||
|
|
9e4a560911 | ||
|
|
f244255386 | ||
|
|
254e2c25ee | ||
|
|
7455cf17c8 | ||
|
|
d93cb50896 | ||
|
|
3316cab775 | ||
|
|
c01f00f6c3 | ||
|
|
06ff25550e | ||
|
|
1f7ef44556 | ||
|
|
fabf2b4df6 | ||
|
|
0fbaeb861e | ||
|
|
3dcc04a318 | ||
|
|
933e053df3 | ||
|
|
5f22a583e8 | ||
|
|
3174b49d94 | ||
|
|
93ce311359 | ||
|
|
bc43c5e329 | ||
|
|
9bf7aa20fb | ||
|
|
5416bb15c3 | ||
|
|
562a659195 | ||
|
|
1d3d6e2741 | ||
|
|
c9724527b5 | ||
|
|
2891209b4e | ||
|
|
5b87e19d3e | ||
|
|
674e24fc41 | ||
|
|
91f82fd6d3 | ||
|
|
cf43513d52 | ||
|
|
a7288a94cc | ||
|
|
d0918c92e4 | ||
|
|
4ff2061568 | ||
|
|
08c402149b | ||
|
|
184dbf0684 | ||
|
|
ed0050ba05 | ||
|
|
68030a1024 | ||
|
|
983ad1fcf4 | ||
|
|
d959ac0401 | ||
|
|
2a550db02a | ||
|
|
6369fa5fda | ||
|
|
d5a13a4206 | ||
|
|
b2532ce03a | ||
|
|
79a67d8c29 | ||
|
|
d9bd38674c | ||
|
|
a0154aaaae | ||
|
|
17f74cf296 | ||
|
|
3f112cd578 | ||
|
|
f6439049d8 | ||
|
|
2fe818872c | ||
|
|
a419969b85 | ||
|
|
ee52448f17 | ||
|
|
79103990fa | ||
|
|
22dbafbc00 | ||
|
|
0df283778c | ||
|
|
a6282b5449 | ||
|
|
5574280ad6 | ||
|
|
19b907b742 | ||
|
|
a9ff8f37b0 | ||
|
|
0769111f8c | ||
|
|
cf6ae8b5ae | ||
|
|
1d6846ced3 | ||
|
|
d516d80093 | ||
|
|
bf9ab71fd9 | ||
|
|
33b00ad323 | ||
|
|
301ff084f1 | ||
|
|
0c146bb245 | ||
|
|
08cc4a1acb | ||
|
|
f97a1653d9 | ||
|
|
d9dbab301a | ||
|
|
3d93197101 | ||
|
|
752a1d8923 | ||
|
|
68002daffa | ||
|
|
ad5062c582 | ||
|
|
2680468f34 | ||
|
|
6156fc296a | ||
|
|
0feed294d4 | ||
|
|
e57736b955 | ||
|
|
70fcdc0129 | ||
|
|
9a64195ebd | ||
|
|
b0f229f851 | ||
|
|
877a5ccd85 | ||
|
|
c0f2e2f771 | ||
|
|
0adfc9beb3 | ||
|
|
d0bc41d7ee | ||
|
|
fa46a065a4 | ||
|
|
8fcd5ba7d6 | ||
|
|
759cdc6b40 | ||
|
|
1405d9ff0e | ||
|
|
d8fcbbad0a | ||
|
|
3eca25db34 | ||
|
|
c8a5a89369 | ||
|
|
ff578ea819 | ||
|
|
1c730c25d5 | ||
|
|
35b7b39b86 | ||
|
|
719c711484 | ||
|
|
afbbc9d00c | ||
|
|
b8e0a45fc8 | ||
|
|
b7360dd33e | ||
|
|
d9f1956426 | ||
|
|
b5c7f36410 | ||
|
|
0b0663d935 | ||
|
|
eee1f65436 | ||
|
|
9a8d4149f2 | ||
|
|
b02a205668 | ||
|
|
57284dfbed | ||
|
|
afcbde7fc6 | ||
|
|
151fac5bf1 | ||
|
|
57c1efdab9 | ||
|
|
6b272cef87 | ||
|
|
1cdc732739 | ||
|
|
d1b00d162d | ||
|
|
3dd3980bc1 | ||
|
|
cbf475eb26 | ||
|
|
ac8b575659 | ||
|
|
ac8ef286a4 | ||
|
|
f567dc37be | ||
|
|
15c5fc5258 | ||
|
|
cc985b52a5 | ||
|
|
910b0386be | ||
|
|
0fece23405 | ||
|
|
eee320e0c7 | ||
|
|
accabf8e21 | ||
|
|
acc253d35c | ||
|
|
ede0154efe | ||
|
|
5b805b1428 | ||
|
|
2e6b2a89db | ||
|
|
c028bb4ddc | ||
|
|
b70beb5684 | ||
|
|
128af4521b | ||
|
|
43cf7a80c8 | ||
|
|
3223ed190c | ||
|
|
9e2817c037 | ||
|
|
6e7bd10fb9 | ||
|
|
c099205779 | ||
|
|
47d8da0e80 | ||
|
|
0f7e88e58c | ||
|
|
65902a15b1 | ||
|
|
a68b2babeb | ||
|
|
4098802e43 | ||
|
|
9c14258e9f | ||
|
|
33bdbe8be8 | ||
|
|
a76864c109 | ||
|
|
cb68d07751 | ||
|
|
8e9fccdbbc | ||
|
|
39990fc2b4 | ||
|
|
e8c315d834 | ||
|
|
f8a06a8746 | ||
|
|
9415087da7 | ||
|
|
9aee5c32eb | ||
|
|
fcdb4a3889 | ||
|
|
534a326258 | ||
|
|
0390ff5919 | ||
|
|
b800ae1751 | ||
|
|
a2c17982d3 | ||
|
|
0347befae6 | ||
|
|
af54b79790 | ||
|
|
dd04ae98a0 | ||
|
|
31b76fba92 | ||
|
|
9f4a4b0eb0 | ||
|
|
575a23c6bf | ||
|
|
5d84f09359 | ||
|
|
3072583482 | ||
|
|
8d867cf78a | ||
|
|
36c79b5a2a | ||
|
|
dfdaf731b4 | ||
|
|
67bff8586c | ||
|
|
9e4cbea6e4 | ||
|
|
d150b2ce54 | ||
|
|
a20949cc4d | ||
|
|
e3fceb20a2 | ||
|
|
f4e00d9ef3 | ||
|
|
1980bd5988 | ||
|
|
db54affc74 | ||
|
|
0edb9444ef | ||
|
|
b22c25f53f | ||
|
|
76e6666a79 | ||
|
|
a804a10e0e | ||
|
|
fe413b12c1 | ||
|
|
e38dc2f063 | ||
|
|
5e5418090b | ||
|
|
56c1f8582a | ||
|
|
00f8c0a280 | ||
|
|
1d915eb155 | ||
|
|
b7b8060ef2 | ||
|
|
2d190b076a | ||
|
|
cd92b1afea | ||
|
|
4d21a001d6 | ||
|
|
4af59d2315 | ||
|
|
c9c98b6c11 | ||
|
|
1ff43db2ce | ||
|
|
822f6b4729 | ||
|
|
44a8dc6815 | ||
|
|
a35576895c | ||
|
|
631662b30c | ||
|
|
cbe3f5a2dc | ||
|
|
73f8bd426b | ||
|
|
0642604480 | ||
|
|
1d95f5076e | ||
|
|
53b0c2e8f9 | ||
|
|
f59f5fe981 | ||
|
|
67545d8a13 | ||
|
|
ab3e3b40c4 | ||
|
|
188024c2db | ||
|
|
324b56a623 | ||
|
|
782d424392 | ||
|
|
cf63bfda9d | ||
|
|
903d4c647c | ||
|
|
407b83fe90 | ||
|
|
27edc80d2b | ||
|
|
01f48f8b91 | ||
|
|
527e690170 | ||
|
|
d100572aa4 | ||
|
|
42640c4ad5 | ||
|
|
a61972e503 | ||
|
|
464e147223 | ||
|
|
8759784561 | ||
|
|
ee5b4a689e | ||
|
|
71ccf1eea8 | ||
|
|
a9ee7c463b | ||
|
|
6f683a71c7 | ||
|
|
24b192b22c | ||
|
|
b6b1a4737f | ||
|
|
00202cc865 | ||
|
|
235524b06d | ||
|
|
8a7f822970 | ||
|
|
ff3f048bb4 | ||
|
|
abda202f32 | ||
|
|
2d4ac84de0 | ||
|
|
86732e7827 | ||
|
|
693b5b1978 | ||
|
|
e3d3ecfd31 | ||
|
|
ce6b81ab73 | ||
|
|
501365b5a3 | ||
|
|
c6741d4392 | ||
|
|
42feae53dd | ||
|
|
c65695b8dc | ||
|
|
4da71e262b | ||
|
|
c519fd33d5 | ||
|
|
07ef0211b9 | ||
|
|
c45b56a5b6 | ||
|
|
6f27fc7669 | ||
|
|
4530ac017c | ||
|
|
400fe6efa3 | ||
|
|
ac7a12d18d | ||
|
|
c2ff11fab7 | ||
|
|
34019ff338 | ||
|
|
176bc43888 | ||
|
|
2e290c4c74 | ||
|
|
74a374d46b | ||
|
|
58f5f10c78 | ||
|
|
7d8ed954a9 | ||
|
|
078b3cef3c | ||
|
|
22ef0250ca | ||
|
|
cc53162dcc | ||
|
|
fa309cfcef | ||
|
|
4d57b0cf79 | ||
|
|
6ea5d28609 | ||
|
|
9d56a2ce9a | ||
|
|
811759478a | ||
|
|
28e2d93314 | ||
|
|
93b3117699 | ||
|
|
10e6a1019e | ||
|
|
2024555780 | ||
|
|
e15c3fa3e6 | ||
|
|
8aa6403f51 | ||
|
|
fb5fca1dc4 | ||
|
|
75d5b1a695 | ||
|
|
e56d9bddbf | ||
|
|
7d9aa70dc0 | ||
|
|
6d72ed2a69 | ||
|
|
9b584f78a0 | ||
|
|
dfe0e74f9c | ||
|
|
a11c08a2ee | ||
|
|
9159204883 | ||
|
|
605e27ce99 | ||
|
|
2dc08b36ea | ||
|
|
60dae4f1fb | ||
|
|
85728d33bb | ||
|
|
2ade08aa89 | ||
|
|
50909962d3 | ||
|
|
cc02023730 | ||
|
|
5bdc40b9f5 | ||
|
|
4f3e63db07 | ||
|
|
b8893b853f | ||
|
|
6da6f38673 | ||
|
|
369dcbb5a1 | ||
|
|
ec010f29e8 | ||
|
|
22867bc9e6 | ||
|
|
dde1913e07 | ||
|
|
5b5842a5f8 | ||
|
|
fbf086886f | ||
|
|
c1ff6c4b26 | ||
|
|
99b110d052 | ||
|
|
3df498eed4 | ||
|
|
b5ab2a6ac9 | ||
|
|
5c91960f04 | ||
|
|
3b52fd3213 | ||
|
|
9366457b88 | ||
|
|
1cb7ef66db | ||
|
|
ee6a05deae | ||
|
|
c978883584 | ||
|
|
9b5508ecba | ||
|
|
8e1c6fae7c | ||
|
|
59e662f5a7 | ||
|
|
6486d97ee3 | ||
|
|
8c088440c5 | ||
|
|
320ee1c5d1 | ||
|
|
e123720354 | ||
|
|
d39d4e79ad | ||
|
|
8d7eeece30 | ||
|
|
3b64e1a3ec | ||
|
|
81ae9bd635 | ||
|
|
27846772e9 | ||
|
|
baf697b919 | ||
|
|
59ede8d446 | ||
|
|
8b748a3343 | ||
|
|
75471aaddc | ||
|
|
7225f261f1 | ||
|
|
c466264d43 | ||
|
|
14e801b717 | ||
|
|
af4b467814 | ||
|
|
1b3feaa167 | ||
|
|
2526fa0ca8 | ||
|
|
a878d36dcf | ||
|
|
90de6433b6 | ||
|
|
d9abc364f1 | ||
|
|
e542b6df1f | ||
|
|
a7a6b085f1 | ||
|
|
0078f76e8c | ||
|
|
1a01cb60d9 | ||
|
|
0f81ce4c24 | ||
|
|
c4ef4137d0 | ||
|
|
cdc6d71356 | ||
|
|
2357a6378e | ||
|
|
9503d0fef4 | ||
|
|
c46dda4540 | ||
|
|
894c23f64f | ||
|
|
9360fa954c | ||
|
|
74408e56fd | ||
|
|
dd8e54fa6b | ||
|
|
b378840878 | ||
|
|
c0a6406dc9 | ||
|
|
df3544e734 | ||
|
|
d40de5b67e | ||
|
|
25b63dfc65 | ||
|
|
70f50c8595 | ||
|
|
e9aba4e119 | ||
|
|
53aca0ee08 | ||
|
|
9aa41823b4 | ||
|
|
8d4a336b50 | ||
|
|
3c96e68fde | ||
|
|
93f316b820 | ||
|
|
ccde71f9d0 | ||
|
|
7186c6792a | ||
|
|
e8961ed299 | ||
|
|
1f050436d3 |
4
.flake8
Normal file
@@ -0,0 +1,4 @@
|
||||
[flake8]
|
||||
max-line-length = 120
|
||||
extend-ignore = E203, E501, E722
|
||||
extend-exclude = venv, scripts, build, dist
|
||||
63
.github/workflows/build.yaml
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
name: Build
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "[0-9]+.[0-9]+.[0-9]+*"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.9]
|
||||
os: [ubuntu-latest, macos-10.15, windows-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python3 -m pip install -r requirements_dev.txt
|
||||
python3 -m pip install -r requirements.txt
|
||||
for requirement in requirements-*.txt; do
|
||||
python3 -m pip install -r "$requirement"
|
||||
done
|
||||
shell: bash
|
||||
- name: Install Windows dependencies
|
||||
run: |
|
||||
choco install -y zip
|
||||
if: runner.os == 'Windows'
|
||||
- name: build
|
||||
run: |
|
||||
make pydist
|
||||
make dist
|
||||
- name: Archive production artifacts
|
||||
uses: actions/upload-artifact@v2
|
||||
if: runner.os != 'Linux' # linux binary currently has a segfault when running on latest fedora
|
||||
with:
|
||||
name: "${{ format('ComicTagger-{0}', runner.os) }}"
|
||||
path: dist/*.zip
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
prerelease: "${{ contains(github.ref, '-') }}" # alpha-releases should be 1.3.0-alpha.x full releases should be 1.3.0
|
||||
draft: true
|
||||
files: dist/*.zip
|
||||
- name: "Publish distribution 📦 to PyPI"
|
||||
if: startsWith(github.ref, 'refs/tags/') && runner.os == 'Linux'
|
||||
uses: pypa/gh-action-pypi-publish@master
|
||||
with:
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
||||
packages_dir: piprelease
|
||||
157
.gitignore
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
# generated by setuptools_scm
|
||||
ctversion.py
|
||||
|
||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion
|
||||
|
||||
*.iml
|
||||
|
||||
## Directory-based project format:
|
||||
.idea/
|
||||
|
||||
### Other editors
|
||||
.*.swp
|
||||
nbproject/
|
||||
.vscode
|
||||
|
||||
comictaggerlib/_version.py
|
||||
*.exe
|
||||
*.zip
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
59
.travis.yml
Normal file
@@ -0,0 +1,59 @@
|
||||
language: python
|
||||
# Only build tags
|
||||
if: type = pull_request OR tag IS present
|
||||
branches:
|
||||
only:
|
||||
- develop
|
||||
- /^\d+\.\d+\.\d+.*$/
|
||||
env:
|
||||
global:
|
||||
- PYTHON=python3
|
||||
- PIP=pip3
|
||||
- SETUPTOOLS_SCM_PRETEND_VERSION=$TRAVIS_TAG
|
||||
- MAKE=make
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
python: 3.8
|
||||
- name: "Python: 3.7"
|
||||
os: osx
|
||||
language: shell
|
||||
python: 3.7
|
||||
env: PYTHON=python3 PIP="python3 -m pip"
|
||||
cache:
|
||||
- directories:
|
||||
- $HOME/Library/Caches/pip
|
||||
- os: windows
|
||||
language: bash
|
||||
env: PATH=/C/Python37:/C/Python37/Scripts:$PATH MAKE=mingw32-make PIP=pip PYTHON=python
|
||||
before_install:
|
||||
- if [ "$TRAVIS_OS_NAME" = "windows" ]; then choco install -y python --version 3.7.9; choco install -y mingw zip; fi
|
||||
install:
|
||||
- $PIP install -r requirements_dev.txt
|
||||
- $PIP install -r requirements-GUI.txt
|
||||
- $PIP install -r requirements-CBR.txt
|
||||
script:
|
||||
- if [ "$TRAVIS_OS_NAME" != "linux" ]; then $MAKE dist ; fi
|
||||
|
||||
deploy:
|
||||
- name: "$TRAVIS_TAG"
|
||||
body: Released ComicTagger $TRAVIS_TAG
|
||||
provider: releases
|
||||
skip_cleanup: true
|
||||
api_key:
|
||||
secure: RgohcOJOfLhXXT12bMWaLwOqhe+ClSCYXjYuUJuWK4/E1fdd1xu1ebdQU+MI/R8cZ0Efz3sr2n3NkO/Aa8gN68xEfuF7RVRMm64P9oPrfZgGdsD6H43rU/6kN8bgaDRmCYpLTfXaJ+/gq0x1QDkhWJuceF2BYEGGvL0BvS/TUsLyjVxs8ujTplLyguXHNEv4/7Yz7SBNZZmUHjBuq/y+l8ds3ra9rSgAVAN1tMXoFKJPv+SNNkpTo5WUNMPzBnN041F1rzqHwYDLog2V7Krp9JkXzheRFdAr51/tJBYzEd8AtYVdYvaIvoO6A4PiTZ7MpsmcZZPAWqLQU00UTm/PhT/LVR+7+f8lOBG07RgNNHB+edjDRz3TAuqyuZl9wURWTZKTPuO49TkZMz7Wm0DRNZHvBm1IXLeSG7Tll2YL1+WpZNZg+Dhro2J1QD3vxDXafhMdTCB4z0q5aKpG93IT0p6oXOO0oEGOPZYbA2c5R3SXWSyqd1E1gdhbVjIZr59h++TEf1zz07tvWHqPuAF/Ly/j+dIcY2wj0EzRWaSASWgUpTnMljAkHtWhqDw4GXGDRkRUWRJl1d0/JyVqCeIdRzDQNl8/q7BcO3F1zqr1PgnYdz0lfwWxL1/ekw2vHOJE/GOdkyvX0aJrnaOV338mjJbfGHYv4ESc9ow1kdtIbiU=
|
||||
file_glob: true
|
||||
file: dist/*.zip
|
||||
draft: true
|
||||
on:
|
||||
tags: true
|
||||
condition: $TRAVIS_OS_NAME != "linux"
|
||||
- provider: pypi
|
||||
user: __token__
|
||||
password:
|
||||
secure: h+y5WkE8igf864dnsbGPFvOBkyPkuBYtnDRt+EgxHd71EZnV2YP7ns2Cx12su/SVVDdZCBlmHVtkhl6Jmqy+0rTkSYx+3mlBOqyl8Cj5+BlP/dP7Bdmhs2uLZk2YYL1avbC0A6eoNJFtCkjurnB/jCGE433rvMECWJ5x2HsQTKchCmDAEdAZbRBJrzLFsrIC+6NXW1IJZjd+OojbhLSyVar2Jr32foh6huTcBu/x278V1+zIC/Rwy3W67+3c4aZxYrI47FoYFza0jjFfr3EoSkKYUSByMTIvhWaqB2gIsF0T160jgDd8Lcgej+86ACEuG0v01VE7xoougqlOaJ94eAmapeM7oQXzekSwSAxcK3JQSfgWk/AvPhp07T4pQ8vCZmky6yqvVp1EzfKarTeub1rOnv+qo1znKLrBtOoq6t8pOAeczDdIDs51XT/hxaijpMRCM8vHxN4Kqnc4DY+3KcF7UFyH1ifQJHQe71tLBsM/GnAcJM5/3ykFVGvRJ716p4aa6IoGsdNk6bqlysNh7nURDl+bfm+CDXRkO2jkFwUFNqPHW7JwY6ZFx+b5SM3TzC3obJhfMS7OC37fo2geISOTR0xVie6NvpN6TjNAxFTfDxWJI7yH3Al2w43B3uYDd97WeiN+B+HVWtdaER87IVSRbRqFrRub+V+xrozT0y0=
|
||||
skip_existing: true
|
||||
skip_cleanup: true
|
||||
on:
|
||||
tags: true
|
||||
condition: $TRAVIS_OS_NAME = "linux"
|
||||
137
CONTRIBUTING.md
Normal file
@@ -0,0 +1,137 @@
|
||||
# How to contribute
|
||||
|
||||
If your not sure what you can do or you need to ask a question or just want to talk about ComicTagger head over to the [discussions tab](https://github.com/comictagger/comictagger/discussions/categories/general) and start a discussion
|
||||
|
||||
## Tests
|
||||
|
||||
We have tests written using pytest! Some of them even pass! If you are contributing code any tests you can write are appreciated.
|
||||
|
||||
A great place to start is extending the tests that are already made.
|
||||
|
||||
For example the file tests/filenames.py has lists of filenames to be parsed in the format:
|
||||
```py
|
||||
pytest.param(
|
||||
"Star Wars - War of the Bounty Hunters - IG-88 (2021) (Digital) (Kileko-Empire).cbz",
|
||||
"number ends series, no-issue",
|
||||
{
|
||||
"issue": "",
|
||||
"series": "Star Wars - War of the Bounty Hunters - IG-88",
|
||||
"volume": "",
|
||||
"year": "2021",
|
||||
"remainder": "(Digital) (Kileko-Empire)",
|
||||
"issue_count": "",
|
||||
},
|
||||
marks=pytest.mark.xfail,
|
||||
)
|
||||
```
|
||||
|
||||
A test consists of 3-4 parts
|
||||
1. The filename to be parsed
|
||||
2. The reason it might fail
|
||||
3. What the result of parsing the filename should be
|
||||
4. `marks=pytest.mark.xfail` This marks the test as expected to fail
|
||||
|
||||
If you are not comfortable creating a pull request you can [open an issue](https://github.com/comictagger/comictagger/issues/new/choose) or [start a discussion](https://github.com/comictagger/comictagger/discussions/new)
|
||||
|
||||
## Submitting changes
|
||||
|
||||
Please open a [GitHub Pull Request](https://github.com/comictagger/comictagger/pull/new/develop) with a clear list of what you've done (read more about [pull requests](http://help.github.com/pull-requests/)). When you send a pull request, we will love you forever if you include tests. We can always use more test coverage. Please run the code tools below and make sure all of your commits are atomic (one feature per commit).
|
||||
|
||||
## Contributing Code
|
||||
|
||||
Currently only python 3.9 is supported however 3.10 will probably work if you try it
|
||||
|
||||
Those on linux should install `Pillow` from the system package manager if possible and if the GUI and/or the CBR/RAR comicbooks are going to be used `pyqt5` and `unrar-cffi` should be installed from the system package manager
|
||||
|
||||
Those on macOS will need to ensure that you are using python3 in x86 mode either by installing an x86 only version of python or using the universal installer and using `python3-intel64` instead of `python3`
|
||||
|
||||
1. Clone the repository
|
||||
```
|
||||
git clone https://github.com/comictagger/comictagger.git
|
||||
```
|
||||
|
||||
2. It is preferred to use a virtual env for running from source, adding the `--system-site-packages` allows packages already installed via the system package manager to be used:
|
||||
|
||||
```
|
||||
python3 -m venv --system-site-packages venv
|
||||
```
|
||||
|
||||
3. Activate the virtual env:
|
||||
```
|
||||
. venv/bin/activate
|
||||
```
|
||||
or if on windows PowerShell
|
||||
```
|
||||
. venv/bin/activate.ps1
|
||||
```
|
||||
|
||||
4. install dependencies:
|
||||
```bash
|
||||
pip install -r requirements_dev.txt -r requirements.txt
|
||||
# if installing optional dependencies
|
||||
pip install -r requirements-GUI.txt -r requirements-CBR.txt
|
||||
```
|
||||
|
||||
5. install ComicTagger
|
||||
```
|
||||
pip install .
|
||||
```
|
||||
|
||||
6. (optionall) run pytest to ensure that their are no failures (xfailed means expected failure)
|
||||
```
|
||||
$ pytest
|
||||
============================= test session starts ==============================
|
||||
platform darwin -- Python 3.9.12, pytest-7.1.1, pluggy-1.0.0
|
||||
rootdir: /Users/timmy/build/source/comictagger
|
||||
collected 61 items
|
||||
|
||||
tests/test_FilenameParser.py ..x......x.xxx.xx....xxxxxx.xx.x..xxxxxxx [ 67%]
|
||||
tests/test_comicarchive.py x... [ 73%]
|
||||
tests/test_rename.py ..xxx.xx..XXX.XX [100%]
|
||||
|
||||
================== 27 passed, 29 xfailed, 5 xpassed in 2.68s ===================
|
||||
```
|
||||
|
||||
7. Make your changes
|
||||
8. run code tools and correct any issues
|
||||
```bash
|
||||
black .
|
||||
isort .
|
||||
flake8 .
|
||||
pytest
|
||||
```
|
||||
|
||||
black: formats all of the code consistently so there are no surprises<br>
|
||||
isort: sorts imports so that you can always find where an import is located<br>
|
||||
flake8: checks for code quality and style (warns for unused imports and similar issues)<br>
|
||||
pytest: runs tests for ComicTagger functionality
|
||||
|
||||
|
||||
if on mac or linux most of this can be accomplished by running
|
||||
```
|
||||
make install
|
||||
# or make PYTHON=python3-intel64 install
|
||||
. venv/bin/activate
|
||||
make CI
|
||||
```
|
||||
There is also `make check` which will run all of the code tools in a read-only capacity
|
||||
```
|
||||
$ make check
|
||||
venv/bin/black --check .
|
||||
All done! ✨ 🍰 ✨
|
||||
52 files would be left unchanged.
|
||||
venv/bin/isort --check .
|
||||
Skipped 6 files
|
||||
venv/bin/flake8 .
|
||||
venv/bin/pytest
|
||||
============================= test session starts ==============================
|
||||
platform darwin -- Python 3.9.12, pytest-7.1.1, pluggy-1.0.0
|
||||
rootdir: /Users/timmy/build/source/comictagger
|
||||
collected 61 items
|
||||
|
||||
tests/test_FilenameParser.py ..x......x.xxx.xx....xxxxxx.xx.x..xxxxxxx [ 67%]
|
||||
tests/test_comicarchive.py x... [ 73%]
|
||||
tests/test_rename.py ..xxx.xx..XXX.XX [100%]
|
||||
|
||||
================== 27 passed, 29 xfailed, 5 xpassed in 2.68s ===================
|
||||
```
|
||||
7
MANIFEST.in
Normal file
@@ -0,0 +1,7 @@
|
||||
include README.md
|
||||
include release_notes.txt
|
||||
include requirements.txt
|
||||
recursive-include scripts *.py *.txt
|
||||
recursive-include desktop-integration *
|
||||
include windows/app.ico
|
||||
include mac/app.icns
|
||||
103
Makefile
@@ -1,24 +1,85 @@
|
||||
TAGGER_BASE := $(HOME)/Dropbox/tagger/comictagger
|
||||
VERSION_STR := $(shell grep version $(TAGGER_BASE)/ctversion.py| cut -d= -f2 | sed 's/\"//g')
|
||||
PIP ?= pip3
|
||||
PYTHON ?= python3
|
||||
VERSION_STR := $(shell $(PYTHON) setup.py --version)
|
||||
|
||||
all: clean
|
||||
SITE_PACKAGES := $(shell $(PYTHON) -c 'import sysconfig; print(sysconfig.get_paths()["purelib"])')
|
||||
PACKAGE_PATH = $(SITE_PACKAGES)/comictagger-$(VERSION_STR).dist-info
|
||||
|
||||
VENV := $(shell echo $${VIRTUAL_ENV-venv})
|
||||
PY3 := $(shell command -v $(PYTHON) 2> /dev/null)
|
||||
PYTHON_VENV := $(VENV)/bin/python
|
||||
INSTALL_STAMP := $(VENV)/.install.stamp
|
||||
INSTALL_GUI_STAMP := $(VENV)/.install-GUI.stamp
|
||||
|
||||
|
||||
ifeq ($(OS),Windows_NT)
|
||||
OS_VERSION=win-$(PROCESSOR_ARCHITECTURE)
|
||||
APP_NAME=comictagger.exe
|
||||
FINAL_NAME=ComicTagger-$(VERSION_STR)-$(OS_VERSION).exe
|
||||
else ifeq ($(shell uname -s),Darwin)
|
||||
OS_VERSION=osx-$(shell defaults read loginwindow SystemVersionStampAsString)-$(shell uname -m)
|
||||
APP_NAME=ComicTagger.app
|
||||
FINAL_NAME=ComicTagger-$(VERSION_STR)-$(OS_VERSION).app
|
||||
else
|
||||
APP_NAME=comictagger
|
||||
FINAL_NAME=ComicTagger-$(VERSION_STR)
|
||||
endif
|
||||
|
||||
.PHONY: all clean pydist upload dist CI check run
|
||||
|
||||
all: clean dist
|
||||
|
||||
$(PYTHON_VENV):
|
||||
@if [ -z $(PY3) ]; then echo "Python 3 could not be found."; exit 2; fi
|
||||
$(PY3) -m venv --system-site-packages $(VENV)
|
||||
|
||||
clean:
|
||||
rm -f *~ *.pyc *.pyo
|
||||
rm -f logdict*.log
|
||||
|
||||
|
||||
zip:
|
||||
cd release; \
|
||||
rm -rf *zip comictagger-src-$(VERSION_STR) ; \
|
||||
svn checkout https://comictagger.googlecode.com/svn/trunk/ comictagger-src-$(VERSION_STR); \
|
||||
zip -r comictagger-src-$(VERSION_STR).zip comictagger-src-$(VERSION_STR); \
|
||||
rm -rf comictagger-src-$(VERSION_STR)
|
||||
|
||||
@echo When satisfied with release, do this:
|
||||
@echo make svn_tag
|
||||
|
||||
svn_tag:
|
||||
svn copy https://comictagger.googlecode.com/svn/trunk \
|
||||
https://comictagger.googlecode.com/svn/tags/$(VERSION_STR) -m "Release $(VERSION_STR)"
|
||||
|
||||
find . -type d -name "__pycache__" | xargs rm -rf {};
|
||||
rm -rf $(INSTALL_STAMP)
|
||||
rm -rf dist MANIFEST
|
||||
$(MAKE) -C mac clean
|
||||
rm -rf build
|
||||
rm comictaggerlib/ctversion.py
|
||||
|
||||
CI: ins
|
||||
black .
|
||||
isort .
|
||||
flake8 .
|
||||
pytest
|
||||
|
||||
check: install
|
||||
$(VENV)/bin/black --check .
|
||||
$(VENV)/bin/isort --check .
|
||||
$(VENV)/bin/flake8 .
|
||||
$(VENV)/bin/pytest
|
||||
|
||||
pydist: CI
|
||||
make clean
|
||||
mkdir -p piprelease
|
||||
rm -f comictagger-$(VERSION_STR).zip
|
||||
$(PYTHON) setup.py sdist --formats=gztar
|
||||
mv dist/comictagger-$(VERSION_STR).tar.gz piprelease
|
||||
rm -rf comictagger.egg-info dist
|
||||
|
||||
upload:
|
||||
$(PYTHON) setup.py register
|
||||
$(PYTHON) setup.py sdist --formats=gztar upload
|
||||
|
||||
install: $(INSTALL_STAMP)
|
||||
$(INSTALL_STAMP): $(PYTHON_VENV) requirements.txt requirements_dev.txt
|
||||
$(PYTHON_VENV) -m pip install -r requirements_dev.txt
|
||||
$(PYTHON_VENV) -m pip install -e .
|
||||
touch $(INSTALL_STAMP)
|
||||
|
||||
install-GUI: $(INSTALL_GUI_STAMP)
|
||||
$(INSTALL_GUI_STAMP): requirements-GUI.txt
|
||||
$(PYTHON_VENV) -m pip install -r requirements-GUI.txt
|
||||
touch $(INSTALL_GUI_STAMP)
|
||||
|
||||
ins: $(PACKAGE_PATH)
|
||||
$(PACKAGE_PATH):
|
||||
$(PIP) install -e .
|
||||
|
||||
dist: CI
|
||||
pyinstaller -y comictagger.spec
|
||||
cd dist && zip -r $(FINAL_NAME).zip $(APP_NAME)
|
||||
|
||||
50
README.md
Normal file
@@ -0,0 +1,50 @@
|
||||
[](https://github.com/comictagger/comictagger/actions/workflows/build.yaml)
|
||||
[](https://gitter.im/comictagger/community)
|
||||
[](https://groups.google.com/forum/#!forum/comictagger)
|
||||
[](https://twitter.com/comictagger)
|
||||
[](https://www.facebook.com/ComicTagger-139615369550787/)
|
||||
|
||||
# ComicTagger
|
||||
|
||||
ComicTagger is a **multi-platform** app for **writing metadata to digital comics**, written in Python and PyQt.
|
||||
|
||||

|
||||
|
||||
## Features
|
||||
|
||||
* Runs on macOS, Microsoft Windows, and Linux systems
|
||||
* Get comic information from [Comic Vine](https://comicvine.gamespot.com/)
|
||||
* **Automatic issue matching** using advanced image processing techniques
|
||||
* **Batch processing** in the GUI for tagging hundreds or more comics at a time
|
||||
* Support for **ComicRack** and **ComicBookLover** tagging formats
|
||||
* Native full support for **CBZ** digital comics
|
||||
* Native read only support for **CBR** digital comics: full support enabled installing additional [rar tools](https://www.rarlab.com/download.htm)
|
||||
* Command line interface (CLI) enabling **custom scripting** and **batch operations on large collections**
|
||||
|
||||
For details, screen-shots, and more, visit [the Wiki](https://github.com/comictagger/comictagger/wiki)
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
### Binaries
|
||||
|
||||
Windows and macOS binaries are provided in the [Releases Page](https://github.com/comictagger/comictagger/releases).
|
||||
|
||||
Just unzip the archive in any folder and run, no additional installation steps are required.
|
||||
|
||||
### PIP installation
|
||||
|
||||
A pip package is provided, you can install it with:
|
||||
|
||||
```
|
||||
$ pip3 install comictagger[GUI]
|
||||
```
|
||||
|
||||
There are two optional dependencies GUI and CBR. You can install the optional dependencies by specifying one or more of `GUI`,`CBR` or `all` in braces e.g. `comictagger[CBR,GUI]`
|
||||
|
||||
### From source
|
||||
|
||||
1. Ensure you have python 3.9 installed
|
||||
2. Clone this repository `git clone https://github.com/comictagger/comictagger.git`
|
||||
3. `pip3 install -r requirements_dev.txt`
|
||||
7. `pip3 install .` or `pip3 install .[GUI]`
|
||||
@@ -1,18 +0,0 @@
|
||||
The unrar.dll library is freeware. This means:
|
||||
|
||||
1. All copyrights to RAR and the unrar.dll are exclusively
|
||||
owned by the author - Alexander Roshal.
|
||||
|
||||
2. The unrar.dll library may be used in any software to handle RAR
|
||||
archives without limitations free of charge.
|
||||
|
||||
3. THE RAR ARCHIVER AND THE UNRAR.DLL LIBRARY ARE DISTRIBUTED "AS IS".
|
||||
NO WARRANTY OF ANY KIND IS EXPRESSED OR IMPLIED. YOU USE AT
|
||||
YOUR OWN RISK. THE AUTHOR WILL NOT BE LIABLE FOR DATA LOSS,
|
||||
DAMAGES, LOSS OF PROFITS OR ANY OTHER KIND OF LOSS WHILE USING
|
||||
OR MISUSING THIS SOFTWARE.
|
||||
|
||||
Thank you for your interest in RAR and unrar.dll.
|
||||
|
||||
|
||||
Alexander L. Roshal
|
||||
@@ -1,140 +0,0 @@
|
||||
#ifndef _UNRAR_DLL_
|
||||
#define _UNRAR_DLL_
|
||||
|
||||
#define ERAR_END_ARCHIVE 10
|
||||
#define ERAR_NO_MEMORY 11
|
||||
#define ERAR_BAD_DATA 12
|
||||
#define ERAR_BAD_ARCHIVE 13
|
||||
#define ERAR_UNKNOWN_FORMAT 14
|
||||
#define ERAR_EOPEN 15
|
||||
#define ERAR_ECREATE 16
|
||||
#define ERAR_ECLOSE 17
|
||||
#define ERAR_EREAD 18
|
||||
#define ERAR_EWRITE 19
|
||||
#define ERAR_SMALL_BUF 20
|
||||
#define ERAR_UNKNOWN 21
|
||||
#define ERAR_MISSING_PASSWORD 22
|
||||
|
||||
#define RAR_OM_LIST 0
|
||||
#define RAR_OM_EXTRACT 1
|
||||
#define RAR_OM_LIST_INCSPLIT 2
|
||||
|
||||
#define RAR_SKIP 0
|
||||
#define RAR_TEST 1
|
||||
#define RAR_EXTRACT 2
|
||||
|
||||
#define RAR_VOL_ASK 0
|
||||
#define RAR_VOL_NOTIFY 1
|
||||
|
||||
#define RAR_DLL_VERSION 4
|
||||
|
||||
#ifdef _UNIX
|
||||
#define CALLBACK
|
||||
#define PASCAL
|
||||
#define LONG long
|
||||
#define HANDLE void *
|
||||
#define LPARAM long
|
||||
#define UINT unsigned int
|
||||
#endif
|
||||
|
||||
struct RARHeaderData
|
||||
{
|
||||
char ArcName[260];
|
||||
char FileName[260];
|
||||
unsigned int Flags;
|
||||
unsigned int PackSize;
|
||||
unsigned int UnpSize;
|
||||
unsigned int HostOS;
|
||||
unsigned int FileCRC;
|
||||
unsigned int FileTime;
|
||||
unsigned int UnpVer;
|
||||
unsigned int Method;
|
||||
unsigned int FileAttr;
|
||||
char *CmtBuf;
|
||||
unsigned int CmtBufSize;
|
||||
unsigned int CmtSize;
|
||||
unsigned int CmtState;
|
||||
};
|
||||
|
||||
|
||||
struct RARHeaderDataEx
|
||||
{
|
||||
char ArcName[1024];
|
||||
wchar_t ArcNameW[1024];
|
||||
char FileName[1024];
|
||||
wchar_t FileNameW[1024];
|
||||
unsigned int Flags;
|
||||
unsigned int PackSize;
|
||||
unsigned int PackSizeHigh;
|
||||
unsigned int UnpSize;
|
||||
unsigned int UnpSizeHigh;
|
||||
unsigned int HostOS;
|
||||
unsigned int FileCRC;
|
||||
unsigned int FileTime;
|
||||
unsigned int UnpVer;
|
||||
unsigned int Method;
|
||||
unsigned int FileAttr;
|
||||
char *CmtBuf;
|
||||
unsigned int CmtBufSize;
|
||||
unsigned int CmtSize;
|
||||
unsigned int CmtState;
|
||||
unsigned int Reserved[1024];
|
||||
};
|
||||
|
||||
|
||||
struct RAROpenArchiveData
|
||||
{
|
||||
char *ArcName;
|
||||
unsigned int OpenMode;
|
||||
unsigned int OpenResult;
|
||||
char *CmtBuf;
|
||||
unsigned int CmtBufSize;
|
||||
unsigned int CmtSize;
|
||||
unsigned int CmtState;
|
||||
};
|
||||
|
||||
struct RAROpenArchiveDataEx
|
||||
{
|
||||
char *ArcName;
|
||||
wchar_t *ArcNameW;
|
||||
unsigned int OpenMode;
|
||||
unsigned int OpenResult;
|
||||
char *CmtBuf;
|
||||
unsigned int CmtBufSize;
|
||||
unsigned int CmtSize;
|
||||
unsigned int CmtState;
|
||||
unsigned int Flags;
|
||||
unsigned int Reserved[32];
|
||||
};
|
||||
|
||||
enum UNRARCALLBACK_MESSAGES {
|
||||
UCM_CHANGEVOLUME,UCM_PROCESSDATA,UCM_NEEDPASSWORD
|
||||
};
|
||||
|
||||
typedef int (CALLBACK *UNRARCALLBACK)(UINT msg,LPARAM UserData,LPARAM P1,LPARAM P2);
|
||||
|
||||
typedef int (PASCAL *CHANGEVOLPROC)(char *ArcName,int Mode);
|
||||
typedef int (PASCAL *PROCESSDATAPROC)(unsigned char *Addr,int Size);
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
HANDLE PASCAL RAROpenArchive(struct RAROpenArchiveData *ArchiveData);
|
||||
HANDLE PASCAL RAROpenArchiveEx(struct RAROpenArchiveDataEx *ArchiveData);
|
||||
int PASCAL RARCloseArchive(HANDLE hArcData);
|
||||
int PASCAL RARReadHeader(HANDLE hArcData,struct RARHeaderData *HeaderData);
|
||||
int PASCAL RARReadHeaderEx(HANDLE hArcData,struct RARHeaderDataEx *HeaderData);
|
||||
int PASCAL RARProcessFile(HANDLE hArcData,int Operation,char *DestPath,char *DestName);
|
||||
int PASCAL RARProcessFileW(HANDLE hArcData,int Operation,wchar_t *DestPath,wchar_t *DestName);
|
||||
void PASCAL RARSetCallback(HANDLE hArcData,UNRARCALLBACK Callback,LPARAM UserData);
|
||||
void PASCAL RARSetChangeVolProc(HANDLE hArcData,CHANGEVOLPROC ChangeVolProc);
|
||||
void PASCAL RARSetProcessDataProc(HANDLE hArcData,PROCESSDATAPROC ProcessDataProc);
|
||||
void PASCAL RARSetPassword(HANDLE hArcData,char *Password);
|
||||
int PASCAL RARGetDllVersion();
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
@@ -1,606 +0,0 @@
|
||||
|
||||
UnRAR.dll Manual
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
UnRAR.dll is a 32-bit Windows dynamic-link library which provides
|
||||
file extraction from RAR archives.
|
||||
|
||||
|
||||
Exported functions
|
||||
|
||||
====================================================================
|
||||
HANDLE PASCAL RAROpenArchive(struct RAROpenArchiveData *ArchiveData)
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Open RAR archive and allocate memory structures
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
ArchiveData Points to RAROpenArchiveData structure
|
||||
|
||||
struct RAROpenArchiveData
|
||||
{
|
||||
char *ArcName;
|
||||
UINT OpenMode;
|
||||
UINT OpenResult;
|
||||
char *CmtBuf;
|
||||
UINT CmtBufSize;
|
||||
UINT CmtSize;
|
||||
UINT CmtState;
|
||||
};
|
||||
|
||||
Structure fields:
|
||||
|
||||
ArcName
|
||||
Input parameter which should point to zero terminated string
|
||||
containing the archive name.
|
||||
|
||||
OpenMode
|
||||
Input parameter.
|
||||
|
||||
Possible values
|
||||
|
||||
RAR_OM_LIST
|
||||
Open archive for reading file headers only.
|
||||
|
||||
RAR_OM_EXTRACT
|
||||
Open archive for testing and extracting files.
|
||||
|
||||
RAR_OM_LIST_INCSPLIT
|
||||
Open archive for reading file headers only. If you open an archive
|
||||
in such mode, RARReadHeader[Ex] will return all file headers,
|
||||
including those with "file continued from previous volume" flag.
|
||||
In case of RAR_OM_LIST such headers are automatically skipped.
|
||||
So if you process RAR volumes in RAR_OM_LIST_INCSPLIT mode, you will
|
||||
get several file header records for same file if file is split between
|
||||
volumes. For such files only the last file header record will contain
|
||||
the correct file CRC and if you wish to get the correct packed size,
|
||||
you need to sum up packed sizes of all parts.
|
||||
|
||||
OpenResult
|
||||
Output parameter.
|
||||
|
||||
Possible values
|
||||
|
||||
0 Success
|
||||
ERAR_NO_MEMORY Not enough memory to initialize data structures
|
||||
ERAR_BAD_DATA Archive header broken
|
||||
ERAR_BAD_ARCHIVE File is not valid RAR archive
|
||||
ERAR_UNKNOWN_FORMAT Unknown encryption used for archive headers
|
||||
ERAR_EOPEN File open error
|
||||
|
||||
CmtBuf
|
||||
Input parameter which should point to the buffer for archive
|
||||
comments. Maximum comment size is limited to 64Kb. Comment text is
|
||||
zero terminated. If the comment text is larger than the buffer
|
||||
size, the comment text will be truncated. If CmtBuf is set to
|
||||
NULL, comments will not be read.
|
||||
|
||||
CmtBufSize
|
||||
Input parameter which should contain size of buffer for archive
|
||||
comments.
|
||||
|
||||
CmtSize
|
||||
Output parameter containing size of comments actually read into the
|
||||
buffer, cannot exceed CmtBufSize.
|
||||
|
||||
CmtState
|
||||
Output parameter.
|
||||
|
||||
Possible values
|
||||
|
||||
0 comments not present
|
||||
1 Comments read completely
|
||||
ERAR_NO_MEMORY Not enough memory to extract comments
|
||||
ERAR_BAD_DATA Broken comment
|
||||
ERAR_UNKNOWN_FORMAT Unknown comment format
|
||||
ERAR_SMALL_BUF Buffer too small, comments not completely read
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
Archive handle or NULL in case of error
|
||||
|
||||
|
||||
========================================================================
|
||||
HANDLE PASCAL RAROpenArchiveEx(struct RAROpenArchiveDataEx *ArchiveData)
|
||||
========================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Similar to RAROpenArchive, but uses RAROpenArchiveDataEx structure
|
||||
allowing to specify Unicode archive name and returning information
|
||||
about archive flags.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
ArchiveData Points to RAROpenArchiveDataEx structure
|
||||
|
||||
struct RAROpenArchiveDataEx
|
||||
{
|
||||
char *ArcName;
|
||||
wchar_t *ArcNameW;
|
||||
unsigned int OpenMode;
|
||||
unsigned int OpenResult;
|
||||
char *CmtBuf;
|
||||
unsigned int CmtBufSize;
|
||||
unsigned int CmtSize;
|
||||
unsigned int CmtState;
|
||||
unsigned int Flags;
|
||||
unsigned int Reserved[32];
|
||||
};
|
||||
|
||||
Structure fields:
|
||||
|
||||
ArcNameW
|
||||
Input parameter which should point to zero terminated Unicode string
|
||||
containing the archive name or NULL if Unicode name is not specified.
|
||||
|
||||
Flags
|
||||
Output parameter. Combination of bit flags.
|
||||
|
||||
Possible values
|
||||
|
||||
0x0001 - Volume attribute (archive volume)
|
||||
0x0002 - Archive comment present
|
||||
0x0004 - Archive lock attribute
|
||||
0x0008 - Solid attribute (solid archive)
|
||||
0x0010 - New volume naming scheme ('volname.partN.rar')
|
||||
0x0020 - Authenticity information present
|
||||
0x0040 - Recovery record present
|
||||
0x0080 - Block headers are encrypted
|
||||
0x0100 - First volume (set only by RAR 3.0 and later)
|
||||
|
||||
Reserved[32]
|
||||
Reserved for future use. Must be zero.
|
||||
|
||||
Information on other structure fields and function return values
|
||||
is available above, in RAROpenArchive function description.
|
||||
|
||||
|
||||
====================================================================
|
||||
int PASCAL RARCloseArchive(HANDLE hArcData)
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Close RAR archive and release allocated memory. It must be called when
|
||||
archive processing is finished, even if the archive processing was stopped
|
||||
due to an error.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
hArcData
|
||||
This parameter should contain the archive handle obtained from the
|
||||
RAROpenArchive function call.
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
0 Success
|
||||
ERAR_ECLOSE Archive close error
|
||||
|
||||
|
||||
====================================================================
|
||||
int PASCAL RARReadHeader(HANDLE hArcData,
|
||||
struct RARHeaderData *HeaderData)
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Read header of file in archive.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
hArcData
|
||||
This parameter should contain the archive handle obtained from the
|
||||
RAROpenArchive function call.
|
||||
|
||||
HeaderData
|
||||
It should point to RARHeaderData structure:
|
||||
|
||||
struct RARHeaderData
|
||||
{
|
||||
char ArcName[260];
|
||||
char FileName[260];
|
||||
UINT Flags;
|
||||
UINT PackSize;
|
||||
UINT UnpSize;
|
||||
UINT HostOS;
|
||||
UINT FileCRC;
|
||||
UINT FileTime;
|
||||
UINT UnpVer;
|
||||
UINT Method;
|
||||
UINT FileAttr;
|
||||
char *CmtBuf;
|
||||
UINT CmtBufSize;
|
||||
UINT CmtSize;
|
||||
UINT CmtState;
|
||||
};
|
||||
|
||||
Structure fields:
|
||||
|
||||
ArcName
|
||||
Output parameter which contains a zero terminated string of the
|
||||
current archive name. May be used to determine the current volume
|
||||
name.
|
||||
|
||||
FileName
|
||||
Output parameter which contains a zero terminated string of the
|
||||
file name in OEM (DOS) encoding.
|
||||
|
||||
Flags
|
||||
Output parameter which contains file flags:
|
||||
|
||||
0x01 - file continued from previous volume
|
||||
0x02 - file continued on next volume
|
||||
0x04 - file encrypted with password
|
||||
0x08 - file comment present
|
||||
0x10 - compression of previous files is used (solid flag)
|
||||
|
||||
bits 7 6 5
|
||||
|
||||
0 0 0 - dictionary size 64 Kb
|
||||
0 0 1 - dictionary size 128 Kb
|
||||
0 1 0 - dictionary size 256 Kb
|
||||
0 1 1 - dictionary size 512 Kb
|
||||
1 0 0 - dictionary size 1024 Kb
|
||||
1 0 1 - dictionary size 2048 KB
|
||||
1 1 0 - dictionary size 4096 KB
|
||||
1 1 1 - file is directory
|
||||
|
||||
Other bits are reserved.
|
||||
|
||||
PackSize
|
||||
Output parameter means packed file size or size of the
|
||||
file part if file was split between volumes.
|
||||
|
||||
UnpSize
|
||||
Output parameter - unpacked file size.
|
||||
|
||||
HostOS
|
||||
Output parameter - operating system used for archiving:
|
||||
|
||||
0 - MS DOS;
|
||||
1 - OS/2.
|
||||
2 - Win32
|
||||
3 - Unix
|
||||
|
||||
FileCRC
|
||||
Output parameter which contains unpacked file CRC. In case of file parts
|
||||
split between volumes only the last part contains the correct CRC
|
||||
and it is accessible only in RAR_OM_LIST_INCSPLIT listing mode.
|
||||
|
||||
FileTime
|
||||
Output parameter - contains date and time in standard MS DOS format.
|
||||
|
||||
UnpVer
|
||||
Output parameter - RAR version needed to extract file.
|
||||
It is encoded as 10 * Major version + minor version.
|
||||
|
||||
Method
|
||||
Output parameter - packing method.
|
||||
|
||||
FileAttr
|
||||
Output parameter - file attributes.
|
||||
|
||||
CmtBuf
|
||||
File comments support is not implemented in the new DLL version yet.
|
||||
Now CmtState is always 0.
|
||||
|
||||
/*
|
||||
* Input parameter which should point to the buffer for file
|
||||
* comments. Maximum comment size is limited to 64Kb. Comment text is
|
||||
* a zero terminated string in OEM encoding. If the comment text is
|
||||
* larger than the buffer size, the comment text will be truncated.
|
||||
* If CmtBuf is set to NULL, comments will not be read.
|
||||
*/
|
||||
|
||||
CmtBufSize
|
||||
Input parameter which should contain size of buffer for archive
|
||||
comments.
|
||||
|
||||
CmtSize
|
||||
Output parameter containing size of comments actually read into the
|
||||
buffer, should not exceed CmtBufSize.
|
||||
|
||||
CmtState
|
||||
Output parameter.
|
||||
|
||||
Possible values
|
||||
|
||||
0 Absent comments
|
||||
1 Comments read completely
|
||||
ERAR_NO_MEMORY Not enough memory to extract comments
|
||||
ERAR_BAD_DATA Broken comment
|
||||
ERAR_UNKNOWN_FORMAT Unknown comment format
|
||||
ERAR_SMALL_BUF Buffer too small, comments not completely read
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
0 Success
|
||||
ERAR_END_ARCHIVE End of archive
|
||||
ERAR_BAD_DATA File header broken
|
||||
|
||||
|
||||
====================================================================
|
||||
int PASCAL RARReadHeaderEx(HANDLE hArcData,
|
||||
struct RARHeaderDataEx *HeaderData)
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Similar to RARReadHeader, but uses RARHeaderDataEx structure,
|
||||
containing information about Unicode file names and 64 bit file sizes.
|
||||
|
||||
struct RARHeaderDataEx
|
||||
{
|
||||
char ArcName[1024];
|
||||
wchar_t ArcNameW[1024];
|
||||
char FileName[1024];
|
||||
wchar_t FileNameW[1024];
|
||||
unsigned int Flags;
|
||||
unsigned int PackSize;
|
||||
unsigned int PackSizeHigh;
|
||||
unsigned int UnpSize;
|
||||
unsigned int UnpSizeHigh;
|
||||
unsigned int HostOS;
|
||||
unsigned int FileCRC;
|
||||
unsigned int FileTime;
|
||||
unsigned int UnpVer;
|
||||
unsigned int Method;
|
||||
unsigned int FileAttr;
|
||||
char *CmtBuf;
|
||||
unsigned int CmtBufSize;
|
||||
unsigned int CmtSize;
|
||||
unsigned int CmtState;
|
||||
unsigned int Reserved[1024];
|
||||
};
|
||||
|
||||
|
||||
====================================================================
|
||||
int PASCAL RARProcessFile(HANDLE hArcData,
|
||||
int Operation,
|
||||
char *DestPath,
|
||||
char *DestName)
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Performs action and moves the current position in the archive to
|
||||
the next file. Extract or test the current file from the archive
|
||||
opened in RAR_OM_EXTRACT mode. If the mode RAR_OM_LIST is set,
|
||||
then a call to this function will simply skip the archive position
|
||||
to the next file.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
hArcData
|
||||
This parameter should contain the archive handle obtained from the
|
||||
RAROpenArchive function call.
|
||||
|
||||
Operation
|
||||
File operation.
|
||||
|
||||
Possible values
|
||||
|
||||
RAR_SKIP Move to the next file in the archive. If the
|
||||
archive is solid and RAR_OM_EXTRACT mode was set
|
||||
when the archive was opened, the current file will
|
||||
be processed - the operation will be performed
|
||||
slower than a simple seek.
|
||||
|
||||
RAR_TEST Test the current file and move to the next file in
|
||||
the archive. If the archive was opened with
|
||||
RAR_OM_LIST mode, the operation is equal to
|
||||
RAR_SKIP.
|
||||
|
||||
RAR_EXTRACT Extract the current file and move to the next file.
|
||||
If the archive was opened with RAR_OM_LIST mode,
|
||||
the operation is equal to RAR_SKIP.
|
||||
|
||||
|
||||
DestPath
|
||||
This parameter should point to a zero terminated string containing the
|
||||
destination directory to which to extract files to. If DestPath is equal
|
||||
to NULL, it means extract to the current directory. This parameter has
|
||||
meaning only if DestName is NULL.
|
||||
|
||||
DestName
|
||||
This parameter should point to a string containing the full path and name
|
||||
to assign to extracted file or it can be NULL to use the default name.
|
||||
If DestName is defined (not NULL), it overrides both the original file
|
||||
name saved in the archive and path specigied in DestPath setting.
|
||||
|
||||
Both DestPath and DestName must be in OEM encoding. If necessary,
|
||||
use CharToOem to convert text to OEM before passing to this function.
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
0 Success
|
||||
ERAR_BAD_DATA File CRC error
|
||||
ERAR_BAD_ARCHIVE Volume is not valid RAR archive
|
||||
ERAR_UNKNOWN_FORMAT Unknown archive format
|
||||
ERAR_EOPEN Volume open error
|
||||
ERAR_ECREATE File create error
|
||||
ERAR_ECLOSE File close error
|
||||
ERAR_EREAD Read error
|
||||
ERAR_EWRITE Write error
|
||||
|
||||
|
||||
Note: if you wish to cancel extraction, return -1 when processing
|
||||
UCM_PROCESSDATA callback message.
|
||||
|
||||
|
||||
====================================================================
|
||||
int PASCAL RARProcessFileW(HANDLE hArcData,
|
||||
int Operation,
|
||||
wchar_t *DestPath,
|
||||
wchar_t *DestName)
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Unicode version of RARProcessFile. It uses Unicode DestPath
|
||||
and DestName parameters, other parameters and return values
|
||||
are the same as in RARProcessFile.
|
||||
|
||||
|
||||
====================================================================
|
||||
void PASCAL RARSetCallback(HANDLE hArcData,
|
||||
int PASCAL (*CallbackProc)(UINT msg,LPARAM UserData,LPARAM P1,LPARAM P2),
|
||||
LPARAM UserData);
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Set a user-defined callback function to process Unrar events.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
hArcData
|
||||
This parameter should contain the archive handle obtained from the
|
||||
RAROpenArchive function call.
|
||||
|
||||
CallbackProc
|
||||
It should point to a user-defined callback function.
|
||||
|
||||
The function will be passed four parameters:
|
||||
|
||||
|
||||
msg Type of event. Described below.
|
||||
|
||||
UserData User defined value passed to RARSetCallback.
|
||||
|
||||
P1 and P2 Event dependent parameters. Described below.
|
||||
|
||||
|
||||
Possible events
|
||||
|
||||
UCM_CHANGEVOLUME Process volume change.
|
||||
|
||||
P1 Points to the zero terminated name
|
||||
of the next volume.
|
||||
|
||||
P2 The function call mode:
|
||||
|
||||
RAR_VOL_ASK Required volume is absent. The function should
|
||||
prompt user and return a positive value
|
||||
to retry or return -1 value to terminate
|
||||
operation. The function may also specify a new
|
||||
volume name, placing it to the address specified
|
||||
by P1 parameter.
|
||||
|
||||
RAR_VOL_NOTIFY Required volume is successfully opened.
|
||||
This is a notification call and volume name
|
||||
modification is not allowed. The function should
|
||||
return a positive value to continue or -1
|
||||
to terminate operation.
|
||||
|
||||
UCM_PROCESSDATA Process unpacked data. It may be used to read
|
||||
a file while it is being extracted or tested
|
||||
without actual extracting file to disk.
|
||||
Return a positive value to continue process
|
||||
or -1 to cancel the archive operation
|
||||
|
||||
P1 Address pointing to the unpacked data.
|
||||
Function may refer to the data but must not
|
||||
change it.
|
||||
|
||||
P2 Size of the unpacked data. It is guaranteed
|
||||
only that the size will not exceed the maximum
|
||||
dictionary size (4 Mb in RAR 3.0).
|
||||
|
||||
UCM_NEEDPASSWORD DLL needs a password to process archive.
|
||||
This message must be processed if you wish
|
||||
to be able to handle archives with encrypted
|
||||
file names. It can be also used as replacement
|
||||
of RARSetPassword function even for usual
|
||||
encrypted files with non-encrypted names.
|
||||
|
||||
P1 Address pointing to the buffer for a password.
|
||||
You need to copy a password here.
|
||||
|
||||
P2 Size of the password buffer.
|
||||
|
||||
|
||||
UserData
|
||||
User data passed to callback function.
|
||||
|
||||
Other functions of UnRAR.dll should not be called from the callback
|
||||
function.
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
None
|
||||
|
||||
|
||||
|
||||
====================================================================
|
||||
void PASCAL RARSetChangeVolProc(HANDLE hArcData,
|
||||
int PASCAL (*ChangeVolProc)(char *ArcName,int Mode));
|
||||
====================================================================
|
||||
|
||||
Obsoleted, use RARSetCallback instead.
|
||||
|
||||
|
||||
|
||||
====================================================================
|
||||
void PASCAL RARSetProcessDataProc(HANDLE hArcData,
|
||||
int PASCAL (*ProcessDataProc)(unsigned char *Addr,int Size))
|
||||
====================================================================
|
||||
|
||||
Obsoleted, use RARSetCallback instead.
|
||||
|
||||
|
||||
====================================================================
|
||||
void PASCAL RARSetPassword(HANDLE hArcData,
|
||||
char *Password);
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Set a password to decrypt files.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
hArcData
|
||||
This parameter should contain the archive handle obtained from the
|
||||
RAROpenArchive function call.
|
||||
|
||||
Password
|
||||
It should point to a string containing a zero terminated password.
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
None
|
||||
|
||||
|
||||
====================================================================
|
||||
void PASCAL RARGetDllVersion();
|
||||
====================================================================
|
||||
|
||||
Description
|
||||
~~~~~~~~~~~
|
||||
Returns API version.
|
||||
|
||||
Parameters
|
||||
~~~~~~~~~~
|
||||
None.
|
||||
|
||||
Return values
|
||||
~~~~~~~~~~~~~
|
||||
Returns an integer value denoting UnRAR.dll API version, which is also
|
||||
defined in unrar.h as RAR_DLL_VERSION. API version number is incremented
|
||||
only in case of noticeable changes in UnRAR.dll API. Do not confuse it
|
||||
with version of UnRAR.dll stored in DLL resources, which is incremented
|
||||
with every DLL rebuild.
|
||||
|
||||
If RARGetDllVersion() returns a value lower than UnRAR.dll which your
|
||||
application was designed for, it may indicate that DLL version is too old
|
||||
and it will fail to provide all necessary functions to your application.
|
||||
|
||||
This function is absent in old versions of UnRAR.dll, so it is safer
|
||||
to use LoadLibrary and GetProcAddress to access this function.
|
||||
|
||||
@@ -1,80 +0,0 @@
List of unrar.dll API changes. We do not include performance and reliability
improvements in this list, but this library and the RAR/UnRAR tools share
the same source code, so the latest version of unrar.dll usually contains
the same decompression algorithm changes as the latest UnRAR version.
============================================================================

-- 18 January 2008

   All LONG parameters of the CallbackProc function were changed
   to the LPARAM type for 64 bit mode compatibility.


-- 12 December 2007

   Added new RAR_OM_LIST_INCSPLIT open mode for function RAROpenArchive.


-- 14 August 2007

   Added NoCrypt\unrar_nocrypt.dll without decryption code for those
   applications where presence of encryption or decryption code is not
   allowed because of legal restrictions.


-- 14 December 2006

   Added ERAR_MISSING_PASSWORD error type. This error is returned
   if an empty password is specified for an encrypted file.


-- 12 June 2003

   Added RARProcessFileW function, the Unicode version of RARProcessFile.


-- 9 August 2002

   Added RAROpenArchiveEx function, which allows specifying a Unicode
   archive name and getting archive flags.


-- 24 January 2002

   Added RARReadHeaderEx function, which allows reading Unicode file names
   and 64 bit file sizes.


-- 23 January 2002

   Added ERAR_UNKNOWN error type (it is used for all errors which
   do not have a special ERAR code yet) and the UCM_NEEDPASSWORD callback
   message.

   Unrar.dll now automatically opens all subsequent volumes not only when
   extracting, but also in RAR_OM_LIST mode.


-- 27 November 2001

   RARSetChangeVolProc and RARSetProcessDataProc are replaced by
   the single callback function installed with RARSetCallback.
   Unlike the old style callbacks, the new function accepts a user-defined
   parameter. Unrar.dll still supports RARSetChangeVolProc and
   RARSetProcessDataProc for compatibility purposes, but new applications
   should use RARSetCallback.

   File comments support is not implemented in the new DLL version yet.
   Now CmtState is always 0.


-- 13 August 2001

   Added RARGetDllVersion function, so you may distinguish the old unrar.dll,
   which used C style callback functions, from the new one with PASCAL
   callbacks.


-- 10 May 2001

   Callback functions in RARSetChangeVolProc and RARSetProcessDataProc
   use the PASCAL calling convention now.

@@ -1 +0,0 @@
This is the x64 version of unrar.dll.
@@ -1,177 +0,0 @@
|
||||
# Copyright (c) 2003-2005 Jimmy Retzlaff, 2008 Konstantin Yegupov
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""
|
||||
pyUnRAR2 is a ctypes based wrapper around the free UnRAR.dll.
|
||||
|
||||
It is a modified version of Jimmy Retzlaff's pyUnRAR - simpler, more
stable and foolproof.
Note that its interface is INCOMPATIBLE with the original.
|
||||
|
||||
It enables reading and unpacking of archives created with the
|
||||
RAR/WinRAR archivers. There is a low-level interface which is very
|
||||
similar to the C interface provided by UnRAR. There is also a
|
||||
higher level interface which makes some common operations easier.
|
||||
"""
|
||||
|
||||
__version__ = '0.99.2'
|
||||
|
||||
try:
|
||||
WindowsError
|
||||
in_windows = True
|
||||
except NameError:
|
||||
in_windows = False
|
||||
|
||||
if in_windows:
|
||||
from windows import RarFileImplementation
|
||||
else:
|
||||
from unix import RarFileImplementation
|
||||
|
||||
|
||||
import fnmatch, time, weakref
|
||||
|
||||
class RarInfo(object):
|
||||
"""Represents a file header in an archive. Don't instantiate directly.
|
||||
Use only to obtain information about file.
|
||||
YOU CANNOT EXTRACT FILE CONTENTS USING THIS OBJECT.
|
||||
USE METHODS OF RarFile CLASS INSTEAD.
|
||||
|
||||
Properties:
|
||||
index - index of file within the archive
|
||||
filename - name of the file in the archive including path (if any)
|
||||
datetime - file date/time as a struct_time suitable for time.strftime
|
||||
isdir - True if the file is a directory
|
||||
size - size in bytes of the uncompressed file
|
||||
comment - comment associated with the file
|
||||
|
||||
Note - this is not currently intended to be a Python file-like object.
|
||||
"""
|
||||
|
||||
def __init__(self, rarfile, data):
|
||||
self.rarfile = weakref.proxy(rarfile)
|
||||
self.index = data['index']
|
||||
self.filename = data['filename']
|
||||
self.isdir = data['isdir']
|
||||
self.size = data['size']
|
||||
self.datetime = data['datetime']
|
||||
self.comment = data['comment']
|
||||
|
||||
|
||||
|
||||
def __str__(self):
|
||||
try:
|
||||
arcName = self.rarfile.archiveName
|
||||
except ReferenceError:
|
||||
arcName = "[ARCHIVE_NO_LONGER_LOADED]"
|
||||
return '<RarInfo "%s" in "%s">' % (self.filename, arcName)
|
||||
|
||||
class RarFile(RarFileImplementation):
|
||||
|
||||
def __init__(self, archiveName, password=None):
|
||||
"""Instantiate the archive.
|
||||
|
||||
archiveName is the name of the RAR file.
|
||||
password is used to decrypt the files in the archive.
|
||||
|
||||
Properties:
|
||||
comment - comment associated with the archive
|
||||
|
||||
>>> print RarFile('test.rar').comment
|
||||
This is a test.
|
||||
"""
|
||||
self.archiveName = archiveName
|
||||
RarFileImplementation.init(self, password)
|
||||
|
||||
def __del__(self):
|
||||
self.destruct()
|
||||
|
||||
def infoiter(self):
|
||||
"""Iterate over all the files in the archive, generating RarInfos.
|
||||
|
||||
>>> import os
|
||||
>>> for fileInArchive in RarFile('test.rar').infoiter():
|
||||
... print os.path.split(fileInArchive.filename)[-1],
|
||||
... print fileInArchive.isdir,
|
||||
... print fileInArchive.size,
|
||||
... print fileInArchive.comment,
|
||||
... print tuple(fileInArchive.datetime)[0:5],
|
||||
... print time.strftime('%a, %d %b %Y %H:%M', fileInArchive.datetime)
|
||||
test True 0 None (2003, 6, 30, 1, 59) Mon, 30 Jun 2003 01:59
|
||||
test.txt False 20 None (2003, 6, 30, 2, 1) Mon, 30 Jun 2003 02:01
|
||||
this.py False 1030 None (2002, 2, 8, 16, 47) Fri, 08 Feb 2002 16:47
|
||||
"""
|
||||
for params in RarFileImplementation.infoiter(self):
|
||||
yield RarInfo(self, params)
|
||||
|
||||
def infolist(self):
|
||||
"""Return a list of RarInfos, descripting the contents of the archive."""
|
||||
return list(self.infoiter())
|
||||
|
||||
def read_files(self, condition='*'):
|
||||
"""Read specific files from archive into memory.
|
||||
If "condition" is a list of numbers, then return files which have those positions in infolist.
|
||||
If "condition" is a string, then it is treated as a wildcard for names of files to extract.
|
||||
If "condition" is a function, it is treated as a callback function, which accepts a RarInfo object
|
||||
and returns boolean True (extract) or False (skip).
|
||||
If "condition" is omitted, all files are returned.
|
||||
|
||||
Returns list of tuples (RarInfo info, str contents)
|
||||
"""
|
||||
checker = condition2checker(condition)
|
||||
return RarFileImplementation.read_files(self, checker)
|
||||
|
||||
|
||||
def extract(self, condition='*', path='.', withSubpath=True, overwrite=True):
|
||||
"""Extract specific files from archive to disk.
|
||||
|
||||
If "condition" is a list of numbers, then extract files which have those positions in infolist.
|
||||
If "condition" is a string, then it is treated as a wildcard for names of files to extract.
|
||||
If "condition" is a function, it is treated as a callback function, which accepts a RarInfo object
|
||||
and returns either boolean True (extract) or boolean False (skip).
|
||||
DEPRECATED: If "condition" callback returns string (only supported for Windows) -
|
||||
that string will be used as a new name to save the file under.
|
||||
If "condition" is omitted, all files are extracted.
|
||||
|
||||
"path" is a directory to extract to
|
||||
"withSubpath" flag denotes whether files are extracted with their full path in the archive.
|
||||
"overwrite" flag denotes whether extracted files will overwrite old ones. Defaults to true.
|
||||
|
||||
Returns list of RarInfos for extracted files."""
|
||||
checker = condition2checker(condition)
|
||||
return RarFileImplementation.extract(self, checker, path, withSubpath, overwrite)
|
||||
|
||||
def condition2checker(condition):
|
||||
"""Converts different condition types to callback"""
|
||||
if type(condition) in [str, unicode]:
|
||||
def smatcher(info):
|
||||
return fnmatch.fnmatch(info.filename, condition)
|
||||
return smatcher
|
||||
elif type(condition) in [list, tuple] and type(condition[0]) in [int, long]:
|
||||
def imatcher(info):
|
||||
return info.index in condition
|
||||
return imatcher
|
||||
elif callable(condition):
|
||||
return condition
|
||||
else:
|
||||
raise TypeError
|
||||
|
||||
|
||||
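As a usage note, the three condition forms accepted by read_files() and extract() above can be exercised as in the sketch below (it assumes a test.rar like the one used in the doctests and an existing output directory).

    import UnRAR2

    rf = UnRAR2.RarFile('test.rar')

    # 1. wildcard string matched against archived file names
    txt_entries = rf.read_files('*.txt')

    # 2. list of indices into infolist()
    first_two = rf.read_files([0, 1])

    # 3. callback taking a RarInfo and returning True (take) or False (skip)
    small = rf.extract(lambda info: info.size <= 1024, path='out', withSubpath=False)
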
@@ -1,30 +0,0 @@
# Copyright (c) 2003-2005 Jimmy Retzlaff, 2008 Konstantin Yegupov
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Low level interface - see UnRARDLL\UNRARDLL.TXT


class ArchiveHeaderBroken(Exception): pass
class InvalidRARArchive(Exception): pass
class FileOpenError(Exception): pass
class IncorrectRARPassword(Exception): pass
class InvalidRARArchiveUsage(Exception): pass
@@ -1,139 +0,0 @@
|
||||
import os, sys
|
||||
|
||||
import UnRAR2
|
||||
from UnRAR2.rar_exceptions import *
|
||||
|
||||
|
||||
def cleanup(dir='test'):
|
||||
for path, dirs, files in os.walk(dir):
|
||||
for fn in files:
|
||||
os.remove(os.path.join(path, fn))
|
||||
for dir in dirs:
|
||||
os.removedirs(os.path.join(path, dir))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# reuse a single RarFile object for listing and extraction
|
||||
cleanup()
|
||||
rarc = UnRAR2.RarFile('test.rar')
|
||||
rarc.infolist()
|
||||
for info in rarc.infoiter():
|
||||
saveinfo = info
|
||||
assert (str(info)=="""<RarInfo "test" in "test.rar">""")
|
||||
break
|
||||
rarc.extract()
|
||||
assert os.path.exists('test'+os.sep+'test.txt')
|
||||
assert os.path.exists('test'+os.sep+'this.py')
|
||||
del rarc
|
||||
assert (str(saveinfo)=="""<RarInfo "test" in "[ARCHIVE_NO_LONGER_LOADED]">""")
|
||||
cleanup()
|
||||
|
||||
# extract all the files in test.rar
|
||||
cleanup()
|
||||
UnRAR2.RarFile('test.rar').extract()
|
||||
assert os.path.exists('test'+os.sep+'test.txt')
|
||||
assert os.path.exists('test'+os.sep+'this.py')
|
||||
cleanup()
|
||||
|
||||
# extract all the files in test.rar matching the wildcard *.txt
|
||||
cleanup()
|
||||
UnRAR2.RarFile('test.rar').extract('*.txt')
|
||||
assert os.path.exists('test'+os.sep+'test.txt')
|
||||
assert not os.path.exists('test'+os.sep+'this.py')
|
||||
cleanup()
|
||||
|
||||
|
||||
# check the name and size of each file, extracting small ones
|
||||
cleanup()
|
||||
archive = UnRAR2.RarFile('test.rar')
|
||||
assert archive.comment == 'This is a test.'
|
||||
archive.extract(lambda rarinfo: rarinfo.size <= 1024)
|
||||
for rarinfo in archive.infoiter():
|
||||
if rarinfo.size <= 1024 and not rarinfo.isdir:
|
||||
assert rarinfo.size == os.stat(rarinfo.filename).st_size
|
||||
assert file('test'+os.sep+'test.txt', 'rt').read() == 'This is only a test.'
|
||||
assert not os.path.exists('test'+os.sep+'this.py')
|
||||
cleanup()
|
||||
|
||||
|
||||
# extract this.py, overriding its destination
|
||||
cleanup('test2')
|
||||
archive = UnRAR2.RarFile('test.rar')
|
||||
archive.extract('*.py', 'test2', False)
|
||||
assert os.path.exists('test2'+os.sep+'this.py')
|
||||
cleanup('test2')
|
||||
|
||||
|
||||
# extract test.txt to memory
|
||||
cleanup()
|
||||
archive = UnRAR2.RarFile('test.rar')
|
||||
entries = UnRAR2.RarFile('test.rar').read_files('*test.txt')
|
||||
assert len(entries)==1
|
||||
assert entries[0][0].filename.endswith('test.txt')
|
||||
assert entries[0][1]=='This is only a test.'
|
||||
|
||||
|
||||
# extract all the files in test.rar with overwriting
|
||||
cleanup()
|
||||
fo = open('test'+os.sep+'test.txt',"wt")
|
||||
fo.write("blah")
|
||||
fo.close()
|
||||
UnRAR2.RarFile('test.rar').extract('*.txt')
|
||||
assert open('test'+os.sep+'test.txt',"rt").read()!="blah"
|
||||
cleanup()
|
||||
|
||||
# extract all the files in test.rar without overwriting
|
||||
cleanup()
|
||||
fo = open('test'+os.sep+'test.txt',"wt")
|
||||
fo.write("blahblah")
|
||||
fo.close()
|
||||
UnRAR2.RarFile('test.rar').extract('*.txt', overwrite = False)
|
||||
assert open('test'+os.sep+'test.txt',"rt").read()=="blahblah"
|
||||
cleanup()
|
||||
|
||||
# list big file in an archive
|
||||
list(UnRAR2.RarFile('test_nulls.rar').infoiter())
|
||||
|
||||
# extract files from an archive with protected files
|
||||
cleanup()
|
||||
UnRAR2.RarFile('test_protected_files.rar', password="protected").extract()
|
||||
assert os.path.exists('test'+os.sep+'top_secret_xxx_file.txt')
|
||||
cleanup()
|
||||
errored = False
|
||||
try:
|
||||
UnRAR2.RarFile('test_protected_files.rar', password="proteqted").extract()
|
||||
except IncorrectRARPassword:
|
||||
errored = True
|
||||
assert not os.path.exists('test'+os.sep+'top_secret_xxx_file.txt')
|
||||
assert errored
|
||||
cleanup()
|
||||
|
||||
# extract files from an archive with protected headers
|
||||
cleanup()
|
||||
UnRAR2.RarFile('test_protected_headers.rar', password="secret").extract()
|
||||
assert os.path.exists('test'+os.sep+'top_secret_xxx_file.txt')
|
||||
cleanup()
|
||||
errored = False
|
||||
try:
|
||||
UnRAR2.RarFile('test_protected_headers.rar', password="seqret").extract()
|
||||
except IncorrectRARPassword:
|
||||
errored = True
|
||||
assert not os.path.exists('test'+os.sep+'top_secret_xxx_file.txt')
|
||||
assert errored
|
||||
cleanup()
|
||||
|
||||
# make sure docstring examples are working
|
||||
import doctest
|
||||
doctest.testmod(UnRAR2)
|
||||
|
||||
# update documentation
|
||||
import pydoc
|
||||
pydoc.writedoc(UnRAR2)
|
||||
|
||||
# cleanup
|
||||
try:
|
||||
os.remove('__init__.pyc')
|
||||
except:
|
||||
pass
|
||||
175
UnRAR2/unix.py
@@ -1,175 +0,0 @@
|
||||
# Copyright (c) 2003-2005 Jimmy Retzlaff, 2008 Konstantin Yegupov
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
# Unix version uses unrar command line executable
|
||||
|
||||
import subprocess
|
||||
import gc
|
||||
|
||||
import os, os.path
|
||||
import time, re
|
||||
|
||||
from rar_exceptions import *
|
||||
|
||||
class UnpackerNotInstalled(Exception): pass
|
||||
|
||||
rar_executable_cached = None
|
||||
|
||||
def call_unrar(params):
|
||||
"Calls rar/unrar command line executable, returns stdout pipe"
|
||||
global rar_executable_cached
|
||||
if rar_executable_cached is None:
|
||||
for command in ('unrar', 'rar'):
|
||||
try:
|
||||
subprocess.Popen([command], stdout=subprocess.PIPE)
|
||||
rar_executable_cached = command
|
||||
break
|
||||
except OSError:
|
||||
pass
|
||||
if rar_executable_cached is None:
|
||||
raise UnpackerNotInstalled("No suitable RAR unpacker installed")
|
||||
|
||||
assert type(params) == list, "params must be list"
|
||||
args = [rar_executable_cached] + params
|
||||
try:
|
||||
gc.disable() # See http://bugs.python.org/issue1336
|
||||
return subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
finally:
|
||||
gc.enable()
|
||||
|
||||
class RarFileImplementation(object):
|
||||
|
||||
def init(self, password=None):
|
||||
self.password = password
|
||||
|
||||
|
||||
|
||||
stdoutdata, stderrdata = self.call('v', []).communicate()
|
||||
|
||||
for line in stderrdata.splitlines():
|
||||
if line.strip().startswith("Cannot open"):
|
||||
raise FileOpenError
|
||||
if line.find("CRC failed")>=0:
|
||||
raise IncorrectRARPassword
|
||||
accum = []
|
||||
source = iter(stdoutdata.splitlines())
|
||||
line = ''
|
||||
while not (line.startswith('Comment:') or line.startswith('Pathname/Comment')):
|
||||
if line.strip().endswith('is not RAR archive'):
|
||||
raise InvalidRARArchive
|
||||
line = source.next()
|
||||
while not line.startswith('Pathname/Comment'):
|
||||
accum.append(line.rstrip('\n'))
|
||||
line = source.next()
|
||||
if len(accum):
|
||||
accum[0] = accum[0][9:]
|
||||
self.comment = '\n'.join(accum[:-1])
|
||||
else:
|
||||
self.comment = None
|
||||
|
||||
def escaped_password(self):
|
||||
return '-' if self.password == None else self.password
|
||||
|
||||
|
||||
def call(self, cmd, options=[], files=[]):
|
||||
options2 = options + ['p'+self.escaped_password()]
|
||||
soptions = ['-'+x for x in options2]
|
||||
return call_unrar([cmd]+soptions+['--',self.archiveName]+files)
|
||||
|
||||
def infoiter(self):
|
||||
|
||||
stdoutdata, stderrdata = self.call('v', ['c-']).communicate()
|
||||
|
||||
for line in stderrdata.splitlines():
|
||||
if line.strip().startswith("Cannot open"):
|
||||
raise FileOpenError
|
||||
|
||||
accum = []
|
||||
source = iter(stdoutdata.splitlines())
|
||||
line = ''
|
||||
while not line.startswith('--------------'):
|
||||
if line.strip().endswith('is not RAR archive'):
|
||||
raise InvalidRARArchive
|
||||
if line.find("CRC failed")>=0:
|
||||
raise IncorrectRARPassword
|
||||
line = source.next()
|
||||
line = source.next()
|
||||
i = 0
|
||||
re_spaces = re.compile(r"\s+")
|
||||
while not line.startswith('--------------'):
|
||||
accum.append(line)
|
||||
if len(accum)==2:
|
||||
data = {}
|
||||
data['index'] = i
|
||||
data['filename'] = accum[0].strip()
|
||||
info = re_spaces.split(accum[1].strip())
|
||||
data['size'] = int(info[0])
|
||||
attr = info[5]
|
||||
data['isdir'] = 'd' in attr.lower()
|
||||
data['datetime'] = time.strptime(info[3]+" "+info[4], '%d-%m-%y %H:%M')
|
||||
data['comment'] = None
|
||||
yield data
|
||||
accum = []
|
||||
i += 1
|
||||
line = source.next()
|
||||
|
||||
def read_files(self, checker):
|
||||
res = []
|
||||
for info in self.infoiter():
|
||||
checkres = checker(info)
|
||||
if checkres==True and not info.isdir:
|
||||
pipe = self.call('p', ['inul'], [info.filename]).stdout
|
||||
res.append((info, pipe.read()))
|
||||
return res
|
||||
|
||||
|
||||
def extract(self, checker, path, withSubpath, overwrite):
|
||||
res = []
|
||||
command = 'x'
|
||||
if not withSubpath:
|
||||
command = 'e'
|
||||
options = []
|
||||
if overwrite:
|
||||
options.append('o+')
|
||||
else:
|
||||
options.append('o-')
|
||||
if not path.endswith(os.sep):
|
||||
path += os.sep
|
||||
names = []
|
||||
for info in self.infoiter():
|
||||
checkres = checker(info)
|
||||
if type(checkres) in [str, unicode]:
|
||||
raise NotImplementedError("Condition callbacks returning strings are deprecated and only supported in Windows")
|
||||
if checkres==True and not info.isdir:
|
||||
names.append(info.filename)
|
||||
res.append(info)
|
||||
names.append(path)
|
||||
proc = self.call(command, options, names)
|
||||
stdoutdata, stderrdata = proc.communicate()
|
||||
if stderrdata.find("CRC failed")>=0:
|
||||
raise IncorrectRARPassword
|
||||
return res
|
||||
|
||||
def destruct(self):
|
||||
pass
|
||||
|
||||
|
||||
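For orientation, the command lines assembled by call_unrar() and RarFileImplementation.call() above look roughly as follows (unrar falls back to the rar binary when unrar is not installed):

    #   listing, comments suppressed, no password:
    #       unrar v -c- -p- -- archive.rar
    #
    #   extraction of selected names into a directory, overwriting:
    #       unrar x -o+ -p- -- archive.rar file1.txt sub/file2.py outdir/
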
@@ -1,309 +0,0 @@
|
||||
# Copyright (c) 2003-2005 Jimmy Retzlaff, 2008 Konstantin Yegupov
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
# Low level interface - see UnRARDLL\UNRARDLL.TXT
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
import ctypes, ctypes.wintypes
|
||||
import os, os.path, sys
|
||||
import Queue
|
||||
import time
|
||||
|
||||
from rar_exceptions import *
|
||||
|
||||
ERAR_END_ARCHIVE = 10
|
||||
ERAR_NO_MEMORY = 11
|
||||
ERAR_BAD_DATA = 12
|
||||
ERAR_BAD_ARCHIVE = 13
|
||||
ERAR_UNKNOWN_FORMAT = 14
|
||||
ERAR_EOPEN = 15
|
||||
ERAR_ECREATE = 16
|
||||
ERAR_ECLOSE = 17
|
||||
ERAR_EREAD = 18
|
||||
ERAR_EWRITE = 19
|
||||
ERAR_SMALL_BUF = 20
|
||||
ERAR_UNKNOWN = 21
|
||||
|
||||
RAR_OM_LIST = 0
|
||||
RAR_OM_EXTRACT = 1
|
||||
|
||||
RAR_SKIP = 0
|
||||
RAR_TEST = 1
|
||||
RAR_EXTRACT = 2
|
||||
|
||||
RAR_VOL_ASK = 0
|
||||
RAR_VOL_NOTIFY = 1
|
||||
|
||||
RAR_DLL_VERSION = 3
|
||||
|
||||
# enum UNRARCALLBACK_MESSAGES
|
||||
UCM_CHANGEVOLUME = 0
|
||||
UCM_PROCESSDATA = 1
|
||||
UCM_NEEDPASSWORD = 2
|
||||
|
||||
architecture_bits = ctypes.sizeof(ctypes.c_voidp)*8
|
||||
dll_name = "unrar.dll"
|
||||
if architecture_bits == 64:
|
||||
dll_name = "x64\\unrar64.dll"
|
||||
|
||||
|
||||
try:
|
||||
unrar = ctypes.WinDLL(os.path.join(os.path.split(__file__)[0], 'UnRARDLL', dll_name))
|
||||
except WindowsError:
|
||||
unrar = ctypes.WinDLL(dll_name)
|
||||
|
||||
|
||||
class RAROpenArchiveDataEx(ctypes.Structure):
|
||||
def __init__(self, ArcName=None, ArcNameW=u'', OpenMode=RAR_OM_LIST):
|
||||
self.CmtBuf = ctypes.c_buffer(64*1024)
|
||||
ctypes.Structure.__init__(self, ArcName=ArcName, ArcNameW=ArcNameW, OpenMode=OpenMode, _CmtBuf=ctypes.addressof(self.CmtBuf), CmtBufSize=ctypes.sizeof(self.CmtBuf))
|
||||
|
||||
_fields_ = [
|
||||
('ArcName', ctypes.c_char_p),
|
||||
('ArcNameW', ctypes.c_wchar_p),
|
||||
('OpenMode', ctypes.c_uint),
|
||||
('OpenResult', ctypes.c_uint),
|
||||
('_CmtBuf', ctypes.c_voidp),
|
||||
('CmtBufSize', ctypes.c_uint),
|
||||
('CmtSize', ctypes.c_uint),
|
||||
('CmtState', ctypes.c_uint),
|
||||
('Flags', ctypes.c_uint),
|
||||
('Reserved', ctypes.c_uint*32),
|
||||
]
|
||||
|
||||
class RARHeaderDataEx(ctypes.Structure):
|
||||
def __init__(self):
|
||||
self.CmtBuf = ctypes.c_buffer(64*1024)
|
||||
ctypes.Structure.__init__(self, _CmtBuf=ctypes.addressof(self.CmtBuf), CmtBufSize=ctypes.sizeof(self.CmtBuf))
|
||||
|
||||
_fields_ = [
|
||||
('ArcName', ctypes.c_char*1024),
|
||||
('ArcNameW', ctypes.c_wchar*1024),
|
||||
('FileName', ctypes.c_char*1024),
|
||||
('FileNameW', ctypes.c_wchar*1024),
|
||||
('Flags', ctypes.c_uint),
|
||||
('PackSize', ctypes.c_uint),
|
||||
('PackSizeHigh', ctypes.c_uint),
|
||||
('UnpSize', ctypes.c_uint),
|
||||
('UnpSizeHigh', ctypes.c_uint),
|
||||
('HostOS', ctypes.c_uint),
|
||||
('FileCRC', ctypes.c_uint),
|
||||
('FileTime', ctypes.c_uint),
|
||||
('UnpVer', ctypes.c_uint),
|
||||
('Method', ctypes.c_uint),
|
||||
('FileAttr', ctypes.c_uint),
|
||||
('_CmtBuf', ctypes.c_voidp),
|
||||
('CmtBufSize', ctypes.c_uint),
|
||||
('CmtSize', ctypes.c_uint),
|
||||
('CmtState', ctypes.c_uint),
|
||||
('Reserved', ctypes.c_uint*1024),
|
||||
]
|
||||
|
||||
def DosDateTimeToTimeTuple(dosDateTime):
|
||||
"""Convert an MS-DOS format date time to a Python time tuple.
|
||||
"""
|
||||
dosDate = dosDateTime >> 16
|
||||
dosTime = dosDateTime & 0xffff
|
||||
day = dosDate & 0x1f
|
||||
month = (dosDate >> 5) & 0xf
|
||||
year = 1980 + (dosDate >> 9)
|
||||
second = 2*(dosTime & 0x1f)
|
||||
minute = (dosTime >> 5) & 0x3f
|
||||
hour = dosTime >> 11
|
||||
return time.localtime(time.mktime((year, month, day, hour, minute, second, 0, 1, -1)))
|
||||
|
||||
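A quick sanity check of the helper above (assuming the local timezone has no DST jump at that instant):

    # 0x2EDE0F60 packs 2003-06-30 01:59:00 in DOS format
    # (high word = date, low word = time, seconds stored in 2-second units).
    t = DosDateTimeToTimeTuple(0x2EDE0F60)
    assert t[:6] == (2003, 6, 30, 1, 59, 0)
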
def _wrap(restype, function, argtypes):
|
||||
result = function
|
||||
result.argtypes = argtypes
|
||||
result.restype = restype
|
||||
return result
|
||||
|
||||
RARGetDllVersion = _wrap(ctypes.c_int, unrar.RARGetDllVersion, [])
|
||||
|
||||
RAROpenArchiveEx = _wrap(ctypes.wintypes.HANDLE, unrar.RAROpenArchiveEx, [ctypes.POINTER(RAROpenArchiveDataEx)])
|
||||
|
||||
RARReadHeaderEx = _wrap(ctypes.c_int, unrar.RARReadHeaderEx, [ctypes.wintypes.HANDLE, ctypes.POINTER(RARHeaderDataEx)])
|
||||
|
||||
_RARSetPassword = _wrap(ctypes.c_int, unrar.RARSetPassword, [ctypes.wintypes.HANDLE, ctypes.c_char_p])
|
||||
def RARSetPassword(*args, **kwargs):
|
||||
_RARSetPassword(*args, **kwargs)
|
||||
|
||||
RARProcessFile = _wrap(ctypes.c_int, unrar.RARProcessFile, [ctypes.wintypes.HANDLE, ctypes.c_int, ctypes.c_char_p, ctypes.c_char_p])
|
||||
|
||||
RARCloseArchive = _wrap(ctypes.c_int, unrar.RARCloseArchive, [ctypes.wintypes.HANDLE])
|
||||
|
||||
UNRARCALLBACK = ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.c_uint, ctypes.c_long, ctypes.c_long, ctypes.c_long)
|
||||
RARSetCallback = _wrap(ctypes.c_int, unrar.RARSetCallback, [ctypes.wintypes.HANDLE, UNRARCALLBACK, ctypes.c_long])
|
||||
|
||||
|
||||
|
||||
RARExceptions = {
|
||||
ERAR_NO_MEMORY : MemoryError,
|
||||
ERAR_BAD_DATA : ArchiveHeaderBroken,
|
||||
ERAR_BAD_ARCHIVE : InvalidRARArchive,
|
||||
ERAR_EOPEN : FileOpenError,
|
||||
}
|
||||
|
||||
class PassiveReader:
|
||||
"""Used for reading files to memory"""
|
||||
def __init__(self, usercallback = None):
|
||||
self.buf = []
|
||||
self.ucb = usercallback
|
||||
|
||||
def _callback(self, msg, UserData, P1, P2):
|
||||
if msg == UCM_PROCESSDATA:
|
||||
data = (ctypes.c_char*P2).from_address(P1).raw
|
||||
if self.ucb!=None:
|
||||
self.ucb(data)
|
||||
else:
|
||||
self.buf.append(data)
|
||||
return 1
|
||||
|
||||
def get_result(self):
|
||||
return ''.join(self.buf)
|
||||
|
||||
class RarInfoIterator(object):
|
||||
def __init__(self, arc):
|
||||
self.arc = arc
|
||||
self.index = 0
|
||||
self.headerData = RARHeaderDataEx()
|
||||
self.res = RARReadHeaderEx(self.arc._handle, ctypes.byref(self.headerData))
|
||||
if self.res==ERAR_BAD_DATA:
|
||||
raise IncorrectRARPassword
|
||||
self.arc.lockStatus = "locked"
|
||||
self.arc.needskip = False
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
if self.index>0:
|
||||
if self.arc.needskip:
|
||||
RARProcessFile(self.arc._handle, RAR_SKIP, None, None)
|
||||
self.res = RARReadHeaderEx(self.arc._handle, ctypes.byref(self.headerData))
|
||||
|
||||
if self.res:
|
||||
raise StopIteration
|
||||
self.arc.needskip = True
|
||||
|
||||
data = {}
|
||||
data['index'] = self.index
|
||||
data['filename'] = self.headerData.FileName
|
||||
data['datetime'] = DosDateTimeToTimeTuple(self.headerData.FileTime)
|
||||
data['isdir'] = ((self.headerData.Flags & 0xE0) == 0xE0)
|
||||
data['size'] = self.headerData.UnpSize + (self.headerData.UnpSizeHigh << 32)
|
||||
if self.headerData.CmtState == 1:
|
||||
data['comment'] = self.headerData.CmtBuf.value
|
||||
else:
|
||||
data['comment'] = None
|
||||
self.index += 1
|
||||
return data
|
||||
|
||||
|
||||
def __del__(self):
|
||||
self.arc.lockStatus = "finished"
|
||||
|
||||
def generate_password_provider(password):
|
||||
def password_provider_callback(msg, UserData, P1, P2):
|
||||
if msg == UCM_NEEDPASSWORD and password!=None:
|
||||
(ctypes.c_char*P2).from_address(P1).value = password
|
||||
return 1
|
||||
return password_provider_callback
|
||||
|
||||
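One detail worth noting before the class below: a ctypes callback object created with UNRARCALLBACK(...) must stay referenced for as long as the DLL may invoke it, otherwise the callback thunk can be garbage collected; this is presumably why init() keeps it on self. A minimal open-and-register sketch (hypothetical file name and password):

    data = RAROpenArchiveDataEx(ArcNameW=u'secret.rar', OpenMode=RAR_OM_EXTRACT)
    handle = RAROpenArchiveEx(ctypes.byref(data))
    cb = UNRARCALLBACK(generate_password_provider('secret'))   # keep a reference
    RARSetCallback(handle, cb, 0)
    # ... RARReadHeaderEx / RARProcessFile loop ...
    RARCloseArchive(handle)
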
class RarFileImplementation(object):
|
||||
|
||||
def init(self, password=None):
|
||||
self.password = password
|
||||
archiveData = RAROpenArchiveDataEx(ArcNameW=self.archiveName, OpenMode=RAR_OM_EXTRACT)
|
||||
self._handle = RAROpenArchiveEx(ctypes.byref(archiveData))
|
||||
self.c_callback = UNRARCALLBACK(generate_password_provider(self.password))
|
||||
RARSetCallback(self._handle, self.c_callback, 1)
|
||||
|
||||
if archiveData.OpenResult != 0:
|
||||
raise RARExceptions[archiveData.OpenResult]
|
||||
|
||||
if archiveData.CmtState == 1:
|
||||
self.comment = archiveData.CmtBuf.value
|
||||
else:
|
||||
self.comment = None
|
||||
|
||||
if password:
|
||||
RARSetPassword(self._handle, password)
|
||||
|
||||
self.lockStatus = "ready"
|
||||
|
||||
|
||||
|
||||
def destruct(self):
|
||||
if self._handle and RARCloseArchive:
|
||||
RARCloseArchive(self._handle)
|
||||
|
||||
def make_sure_ready(self):
|
||||
if self.lockStatus == "locked":
|
||||
raise InvalidRARArchiveUsage("cannot execute infoiter() without finishing previous one")
|
||||
if self.lockStatus == "finished":
|
||||
self.destruct()
|
||||
self.init(self.password)
|
||||
|
||||
def infoiter(self):
|
||||
self.make_sure_ready()
|
||||
return RarInfoIterator(self)
|
||||
|
||||
def read_files(self, checker):
|
||||
res = []
|
||||
for info in self.infoiter():
|
||||
if checker(info) and not info.isdir:
|
||||
reader = PassiveReader()
|
||||
c_callback = UNRARCALLBACK(reader._callback)
|
||||
RARSetCallback(self._handle, c_callback, 1)
|
||||
tmpres = RARProcessFile(self._handle, RAR_TEST, None, None)
|
||||
if tmpres==ERAR_BAD_DATA:
|
||||
raise IncorrectRARPassword
|
||||
self.needskip = False
|
||||
res.append((info, reader.get_result()))
|
||||
return res
|
||||
|
||||
|
||||
def extract(self, checker, path, withSubpath, overwrite):
|
||||
res = []
|
||||
for info in self.infoiter():
|
||||
checkres = checker(info)
|
||||
if checkres!=False and not info.isdir:
|
||||
if checkres==True:
|
||||
fn = info.filename
|
||||
if not withSubpath:
|
||||
fn = os.path.split(fn)[-1]
|
||||
target = os.path.join(path, fn)
|
||||
else:
|
||||
raise DeprecationWarning, "Condition callbacks returning strings are deprecated and only supported in Windows"
|
||||
target = checkres
|
||||
if overwrite or (not os.path.exists(target)):
|
||||
tmpres = RARProcessFile(self._handle, RAR_EXTRACT, None, target)
|
||||
if tmpres==ERAR_BAD_DATA:
|
||||
raise IncorrectRARPassword
|
||||
|
||||
self.needskip = False
|
||||
res.append(info)
|
||||
return res
|
||||
|
||||
|
||||
@@ -1,202 +0,0 @@
|
||||
"""
|
||||
A PyQT4 dialog to select from automated issue matches
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from PyQt4 import QtCore, QtGui, uic
|
||||
|
||||
from PyQt4.QtCore import QUrl, pyqtSignal, QByteArray
|
||||
|
||||
from imagefetcher import ImageFetcher
|
||||
from settings import ComicTaggerSettings
|
||||
|
||||
class AutoTagMatchWindow(QtGui.QDialog):
|
||||
|
||||
volume_id = 0
|
||||
|
||||
def __init__(self, parent, match_set_list, style, fetch_func):
|
||||
super(AutoTagMatchWindow, self).__init__(parent)
|
||||
|
||||
uic.loadUi(os.path.join(ComicTaggerSettings.baseDir(), 'autotagmatchwindow.ui' ), self)
|
||||
|
||||
self.skipButton = QtGui.QPushButton(self.tr("Skip"))
|
||||
self.buttonBox.addButton(self.skipButton, QtGui.QDialogButtonBox.ActionRole)
|
||||
self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setText("Accept and Next")
|
||||
|
||||
self.match_set_list = match_set_list
|
||||
self.style = style
|
||||
self.fetch_func = fetch_func
|
||||
|
||||
self.current_match_set_idx = 0
|
||||
|
||||
self.twList.currentItemChanged.connect(self.currentItemChanged)
|
||||
self.twList.cellDoubleClicked.connect(self.cellDoubleClicked)
|
||||
self.skipButton.clicked.connect(self.skipToNext)
|
||||
|
||||
self.updateData()
|
||||
|
||||
def updateData( self):
|
||||
|
||||
self.current_match_set = self.match_set_list[ self.current_match_set_idx ]
|
||||
|
||||
|
||||
if self.current_match_set_idx + 1 == len( self.match_set_list ):
|
||||
self.skipButton.setDisabled(True)
|
||||
|
||||
self.setCoverImage()
|
||||
self.populateTable()
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.current_row = 0
|
||||
self.twList.selectRow( 0 )
|
||||
|
||||
path = self.current_match_set.ca.path
|
||||
self.setWindowTitle( "Select correct match ({0} of {1}): {2}".format(
|
||||
self.current_match_set_idx+1,
|
||||
len( self.match_set_list ),
|
||||
os.path.split(path)[1] ))
|
||||
|
||||
def populateTable( self ):
|
||||
|
||||
while self.twList.rowCount() > 0:
|
||||
self.twList.removeRow(0)
|
||||
|
||||
self.twList.setSortingEnabled(False)
|
||||
|
||||
row = 0
|
||||
for match in self.current_match_set.matches:
|
||||
self.twList.insertRow(row)
|
||||
|
||||
item_text = match['series']
|
||||
item = QtGui.QTableWidgetItem(item_text)
|
||||
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
|
||||
self.twList.setItem(row, 0, item)
|
||||
|
||||
if match['publisher'] is not None:
|
||||
item_text = u"{0}".format(match['publisher'])
|
||||
else:
|
||||
item_text = u"Unknown"
|
||||
item = QtGui.QTableWidgetItem(item_text)
|
||||
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
|
||||
self.twList.setItem(row, 1, item)
|
||||
|
||||
item_text = ""
|
||||
if match['month'] is not None:
|
||||
item_text = u"{0}/".format(match['month'])
|
||||
if match['year'] is not None:
|
||||
item_text += u"{0}".format(match['year'])
|
||||
else:
|
||||
item_text += u"????"
|
||||
item = QtGui.QTableWidgetItem(item_text)
|
||||
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
|
||||
self.twList.setItem(row, 2, item)
|
||||
|
||||
row += 1
|
||||
|
||||
|
||||
def cellDoubleClicked( self, r, c ):
|
||||
self.accept()
|
||||
|
||||
def currentItemChanged( self, curr, prev ):
|
||||
|
||||
if curr is None:
|
||||
return
|
||||
if prev is not None and prev.row() == curr.row():
|
||||
return
|
||||
|
||||
self.current_row = curr.row()
|
||||
|
||||
# list selection was changed, update the issue cover
|
||||
self.labelThumbnail.setPixmap(QtGui.QPixmap(os.path.join(ComicTaggerSettings.baseDir(), 'graphics/nocover.png' )))
|
||||
|
||||
self.cover_fetcher = ImageFetcher( )
|
||||
self.cover_fetcher.fetchComplete.connect(self.coverFetchComplete)
|
||||
self.cover_fetcher.fetch( self.current_match_set.matches[self.current_row]['img_url'] )
|
||||
|
||||
# called when the image is done loading
|
||||
def coverFetchComplete( self, image_data, issue_id ):
|
||||
img = QtGui.QImage()
|
||||
img.loadFromData( image_data )
|
||||
self.labelThumbnail.setPixmap(QtGui.QPixmap(img))
|
||||
|
||||
def setCoverImage( self ):
|
||||
ca = self.current_match_set.ca
|
||||
cover_idx = ca.readMetadata(self.style).getCoverPageIndexList()[0]
|
||||
image_data = ca.getPage( cover_idx )
|
||||
self.labelCover.setScaledContents(True)
|
||||
if image_data is not None:
|
||||
img = QtGui.QImage()
|
||||
img.loadFromData( image_data )
|
||||
self.labelCover.setPixmap(QtGui.QPixmap(img))
|
||||
else:
|
||||
self.labelCover.setPixmap(QtGui.QPixmap(os.path.join(ComicTaggerSettings.baseDir(), 'graphics/nocover.png' )))
|
||||
|
||||
def accept(self):
|
||||
|
||||
self.saveMatch()
|
||||
self.current_match_set_idx += 1
|
||||
|
||||
if self.current_match_set_idx == len( self.match_set_list ):
|
||||
# no more items
|
||||
QtGui.QDialog.accept(self)
|
||||
else:
|
||||
self.updateData()
|
||||
|
||||
def skipToNext( self ):
|
||||
self.current_match_set_idx += 1
|
||||
|
||||
if self.current_match_set_idx == len( self.match_set_list ):
|
||||
# no more items
|
||||
QtGui.QDialog.reject(self)
|
||||
else:
|
||||
self.updateData()
|
||||
|
||||
def reject(self):
|
||||
reply = QtGui.QMessageBox.question(self,
|
||||
self.tr("Cancel Matching"),
|
||||
self.tr("Are you sure you wish to cancel the matching process?"),
|
||||
QtGui.QMessageBox.Yes, QtGui.QMessageBox.No )
|
||||
|
||||
if reply == QtGui.QMessageBox.No:
|
||||
return
|
||||
|
||||
QtGui.QDialog.reject(self)
|
||||
|
||||
def saveMatch( self ):
|
||||
|
||||
match = self.current_match_set.matches[self.current_row]
|
||||
ca = self.current_match_set.ca
|
||||
|
||||
md = ca.readMetadata( self.style )
|
||||
if md.isEmpty:
|
||||
md = ca.metadataFromFilename()
|
||||
|
||||
# now get the particular issue data
|
||||
cv_md = self.fetch_func( match )
|
||||
if cv_md is None:
|
||||
QtGui.QMessageBox.critical(self, self.tr("Network Issue"), self.tr("Could not connect to ComicVine to get issue details!"))
|
||||
return
|
||||
|
||||
QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
|
||||
md.overlay( cv_md )
|
||||
success = ca.writeMetadata( md, self.style )
|
||||
QtGui.QApplication.restoreOverrideCursor()
|
||||
|
||||
if not success:
|
||||
QtGui.QMessageBox.warning(self, self.tr("Write Error"), self.tr("Saving the tags to the archive seemed to fail!"))
|
||||
@@ -1,161 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>dialogMatchSelect</class>
|
||||
<widget class="QDialog" name="dialogMatchSelect">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>831</width>
|
||||
<height>506</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Select Match</string>
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout">
|
||||
<item row="0" column="1">
|
||||
<layout class="QVBoxLayout" name="verticalLayout">
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout">
|
||||
<item>
|
||||
<widget class="QLabel" name="labelCover">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>300</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>TextLabel</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QTableWidget" name="twList">
|
||||
<property name="font">
|
||||
<font>
|
||||
<pointsize>9</pointsize>
|
||||
</font>
|
||||
</property>
|
||||
<property name="selectionMode">
|
||||
<enum>QAbstractItemView::SingleSelection</enum>
|
||||
</property>
|
||||
<property name="selectionBehavior">
|
||||
<enum>QAbstractItemView::SelectRows</enum>
|
||||
</property>
|
||||
<property name="rowCount">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="columnCount">
|
||||
<number>3</number>
|
||||
</property>
|
||||
<attribute name="horizontalHeaderStretchLastSection">
|
||||
<bool>true</bool>
|
||||
</attribute>
|
||||
<attribute name="verticalHeaderVisible">
|
||||
<bool>false</bool>
|
||||
</attribute>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Series</string>
|
||||
</property>
|
||||
</column>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Publisher</string>
|
||||
</property>
|
||||
</column>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Date</string>
|
||||
</property>
|
||||
</column>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QLabel" name="labelThumbnail">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>300</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="frameShape">
|
||||
<enum>QFrame::Panel</enum>
|
||||
</property>
|
||||
<property name="frameShadow">
|
||||
<enum>QFrame::Sunken</enum>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="scaledContents">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>accepted()</signal>
|
||||
<receiver>dialogMatchSelect</receiver>
|
||||
<slot>accept()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>248</x>
|
||||
<y>254</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>157</x>
|
||||
<y>274</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>rejected()</signal>
|
||||
<receiver>dialogMatchSelect</receiver>
|
||||
<slot>reject()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>316</x>
|
||||
<y>260</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>286</x>
|
||||
<y>274</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
</connections>
|
||||
</ui>
|
||||
@@ -1,67 +0,0 @@
|
||||
"""
|
||||
A PyQT4 dialog to show ID log and progress
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from PyQt4 import QtCore, QtGui, uic
|
||||
import os
|
||||
from settings import ComicTaggerSettings
|
||||
|
||||
|
||||
class AutoTagProgressWindow(QtGui.QDialog):
|
||||
|
||||
|
||||
def __init__(self, parent):
|
||||
super(AutoTagProgressWindow, self).__init__(parent)
|
||||
|
||||
uic.loadUi(os.path.join(ComicTaggerSettings.baseDir(), 'autotagprogresswindow.ui' ), self)
|
||||
self.lblTest.setPixmap(QtGui.QPixmap(os.path.join(ComicTaggerSettings.baseDir(), 'graphics/nocover.png' )))
|
||||
self.lblArchive.setPixmap(QtGui.QPixmap(os.path.join(ComicTaggerSettings.baseDir(), 'graphics/nocover.png' )))
|
||||
self.isdone = False
|
||||
|
||||
# we can't specify relative font sizes in the UI designer, so
|
||||
# make font for scroll window a smidge smaller
|
||||
f = self.textEdit.font()
|
||||
if f.pointSize() > 10:
|
||||
f.setPointSize( f.pointSize() - 2 )
|
||||
self.textEdit.setFont( f )
|
||||
|
||||
def setArchiveImage( self, img_data):
|
||||
self.setCoverImage( img_data, self.lblArchive )
|
||||
|
||||
def setTestImage( self, img_data):
|
||||
self.setCoverImage( img_data, self.lblTest )
|
||||
|
||||
def setCoverImage( self, img_data , label):
|
||||
if img_data is not None:
|
||||
img = QtGui.QImage()
|
||||
img.loadFromData( img_data )
|
||||
label.setPixmap(QtGui.QPixmap(img))
|
||||
label.setScaledContents(True)
|
||||
else:
|
||||
label.setPixmap(QtGui.QPixmap(os.path.join(ComicTaggerSettings.baseDir(), 'graphics/nocover.png' )))
|
||||
label.setScaledContents(True)
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
|
||||
def reject(self):
|
||||
QtGui.QDialog.reject(self)
|
||||
self.isdone = True
|
||||
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
"""
|
||||
A PyQT4 dialog to confirm and set options for auto-tag
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
|
||||
from PyQt4 import QtCore, QtGui, uic
|
||||
from settings import ComicTaggerSettings
|
||||
from settingswindow import SettingsWindow
|
||||
from filerenamer import FileRenamer
|
||||
import os
|
||||
import utils
|
||||
|
||||
|
||||
class AutoTagStartWindow(QtGui.QDialog):
|
||||
|
||||
def __init__( self, parent, settings, msg ):
|
||||
super(AutoTagStartWindow, self).__init__(parent)
|
||||
|
||||
uic.loadUi(os.path.join(ComicTaggerSettings.baseDir(), 'autotagstartwindow.ui' ), self)
|
||||
self.label.setText( msg )
|
||||
|
||||
self.settings = settings
|
||||
|
||||
self.cbxSaveOnLowConfidence.setCheckState( QtCore.Qt.Unchecked )
|
||||
self.cbxDontUseYear.setCheckState( QtCore.Qt.Unchecked )
|
||||
self.cbxAssumeIssueOne.setCheckState( QtCore.Qt.Unchecked )
|
||||
|
||||
self.autoSaveOnLow = False
|
||||
self.dontUseYear = False
|
||||
self.assumeIssueOne = False
|
||||
|
||||
|
||||
def accept( self ):
|
||||
QtGui.QDialog.accept(self)
|
||||
|
||||
self.autoSaveOnLow = self.cbxSaveOnLowConfidence.isChecked()
|
||||
self.dontUseYear = self.cbxDontUseYear.isChecked()
|
||||
self.assumeIssueOne = self.cbxAssumeIssueOne.isChecked()
|
||||
|
||||
@@ -1,124 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>dialogExport</class>
|
||||
<widget class="QDialog" name="dialogExport">
|
||||
<property name="windowModality">
|
||||
<enum>Qt::NonModal</enum>
|
||||
</property>
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>524</width>
|
||||
<height>248</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Auto-Tag</string>
|
||||
</property>
|
||||
<property name="modal">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout_3">
|
||||
<item row="0" column="0">
|
||||
<layout class="QVBoxLayout" name="verticalLayout">
|
||||
<item>
|
||||
<widget class="QLabel" name="label">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="wordWrap">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<layout class="QFormLayout" name="formLayout">
|
||||
<property name="fieldGrowthPolicy">
|
||||
<enum>QFormLayout::AllNonFixedFieldsGrow</enum>
|
||||
</property>
|
||||
<item row="1" column="1">
|
||||
<widget class="QCheckBox" name="cbxDontUseYear">
|
||||
<property name="text">
|
||||
<string>Don't use publication year in identification process</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="0" column="1">
|
||||
<widget class="QCheckBox" name="cbxSaveOnLowConfidence">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Preferred">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Save on low confidence match</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="2" column="1">
|
||||
<widget class="QCheckBox" name="cbxAssumeIssueOne">
|
||||
<property name="text">
|
||||
<string>If no issue number, assume "1"</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>accepted()</signal>
|
||||
<receiver>dialogExport</receiver>
|
||||
<slot>accept()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>346</x>
|
||||
<y>187</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>277</x>
|
||||
<y>104</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>rejected()</signal>
|
||||
<receiver>dialogExport</receiver>
|
||||
<slot>reject()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>346</x>
|
||||
<y>187</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>277</x>
|
||||
<y>104</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
</connections>
|
||||
</ui>
|
||||
@@ -1,99 +0,0 @@
|
||||
"""
|
||||
Class to manage modifying metadata specifically for CBL/CBI
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import os
|
||||
import utils
|
||||
|
||||
|
||||
class CBLTransformer:
|
||||
def __init__( self, metadata, settings ):
|
||||
self.metadata = metadata
|
||||
self.settings = settings
|
||||
|
||||
|
||||
def apply( self ):
|
||||
# helper funcs
|
||||
def append_to_tags_if_unique( item ):
|
||||
if item.lower() not in (tag.lower() for tag in self.metadata.tags):
|
||||
self.metadata.tags.append( item )
|
||||
|
||||
def add_string_list_to_tags( str_list ):
|
||||
if str_list is not None and str_list != "":
|
||||
items = [ s.strip() for s in str_list.split(',') ]
|
||||
for item in items:
|
||||
append_to_tags_if_unique( item )
|
||||
|
||||
if self.settings.assume_lone_credit_is_primary:
|
||||
|
||||
# helper
|
||||
def setLonePrimary( role_list ):
|
||||
lone_credit = None
|
||||
count = 0
|
||||
for c in self.metadata.credits:
|
||||
if c['role'].lower() in role_list:
|
||||
count += 1
|
||||
lone_credit = c
|
||||
if count > 1:
|
||||
lone_credit = None
|
||||
break
|
||||
if lone_credit is not None:
|
||||
lone_credit['primary'] = True
|
||||
return lone_credit, count
|
||||
|
||||
# need to loop up to three times: 'writer', then 'artist', then 'penciller' if there is no artist
|
||||
setLonePrimary( ['writer'] )
|
||||
c, count = setLonePrimary( ['artist'] )
|
||||
if c is None and count == 0:
|
||||
c, count = setLonePrimary( ['penciler', 'penciller'] )
|
||||
if c is not None:
|
||||
c['primary'] = False
|
||||
self.metadata.addCredit( c['person'], 'Artist', True )
|
||||
|
||||
if self.settings.copy_characters_to_tags:
|
||||
add_string_list_to_tags( self.metadata.characters )
|
||||
|
||||
if self.settings.copy_teams_to_tags:
|
||||
add_string_list_to_tags( self.metadata.teams )
|
||||
|
||||
if self.settings.copy_locations_to_tags:
|
||||
add_string_list_to_tags( self.metadata.locations )
|
||||
|
||||
if self.settings.copy_notes_to_comments:
|
||||
if self.metadata.notes is not None:
|
||||
if self.metadata.comments is None:
|
||||
self.metadata.comments = ""
|
||||
else:
|
||||
self.metadata.comments += "\n\n"
|
||||
if self.metadata.notes not in self.metadata.comments:
|
||||
self.metadata.comments += self.metadata.notes
|
||||
|
||||
if self.settings.copy_weblink_to_comments:
|
||||
if self.metadata.webLink is not None:
|
||||
if self.metadata.comments is None:
|
||||
self.metadata.comments = ""
|
||||
else:
|
||||
self.metadata.comments += "\n\n"
|
||||
if self.metadata.webLink not in self.metadata.comments:
|
||||
self.metadata.comments += self.metadata.webLink
|
||||
|
||||
return self.metadata
|
||||
|
||||
|
||||
|
||||
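A short usage sketch for the transformer above (hypothetical 'ca' ComicArchive, 'style' and 'settings' objects, mirroring how metadata is read and written in autotagmatchwindow.py):

    md = ca.readMetadata( style )
    md = CBLTransformer( md, settings ).apply()
    ca.writeMetadata( md, style )
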
260
comet.py
@@ -1,260 +0,0 @@
|
||||
"""
|
||||
A python class to encapsulate CoMet data
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
import zipfile
|
||||
from pprint import pprint
|
||||
import xml.etree.ElementTree as ET
|
||||
from genericmetadata import GenericMetadata
|
||||
import utils
|
||||
|
||||
class CoMet:
|
||||
|
||||
writer_synonyms = ['writer', 'plotter', 'scripter']
|
||||
penciller_synonyms = [ 'artist', 'penciller', 'penciler', 'breakdowns' ]
|
||||
inker_synonyms = [ 'inker', 'artist', 'finishes' ]
|
||||
colorist_synonyms = [ 'colorist', 'colourist', 'colorer', 'colourer' ]
|
||||
letterer_synonyms = [ 'letterer']
|
||||
cover_synonyms = [ 'cover', 'covers', 'coverartist', 'cover artist' ]
|
||||
editor_synonyms = [ 'editor']
|
||||
|
||||
def metadataFromString( self, string ):
|
||||
|
||||
tree = ET.ElementTree(ET.fromstring( string ))
|
||||
return self.convertXMLToMetadata( tree )
|
||||
|
||||
def stringFromMetadata( self, metadata ):
|
||||
|
||||
header = '<?xml version="1.0" encoding="UTF-8"?>\n'
|
||||
|
||||
tree = self.convertMetadataToXML( self, metadata )
|
||||
return header + ET.tostring(tree.getroot())
|
||||
|
||||
def indent( self, elem, level=0 ):
|
||||
# for making the XML output readable
|
||||
i = "\n" + level*" "
|
||||
if len(elem):
|
||||
if not elem.text or not elem.text.strip():
|
||||
elem.text = i + " "
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
for elem in elem:
|
||||
self.indent( elem, level+1 )
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
else:
|
||||
if level and (not elem.tail or not elem.tail.strip()):
|
||||
elem.tail = i
|
||||
|
||||
def convertMetadataToXML( self, filename, metadata ):
|
||||
|
||||
#shorthand for the metadata
|
||||
md = metadata
|
||||
|
||||
# build a tree structure
|
||||
root = ET.Element("comet")
|
||||
root.attrib['xmlns:comet'] = "http://www.denvog.com/comet/"
|
||||
root.attrib['xmlns:xsi'] = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
root.attrib['xsi:schemaLocation'] = "http://www.denvog.com http://www.denvog.com/comet/comet.xsd"
|
||||
|
||||
#helper func
|
||||
def assign( comet_entry, md_entry):
|
||||
if md_entry is not None:
|
||||
ET.SubElement(root, comet_entry).text = u"{0}".format(md_entry)
|
||||
|
||||
# title is mandatory
|
||||
if md.title is None:
|
||||
md.title = ""
|
||||
assign( 'title', md.title )
|
||||
assign( 'series', md.series )
|
||||
assign( 'issue', md.issue ) #must be int??
|
||||
assign( 'volume', md.volume )
|
||||
assign( 'description', md.comments )
|
||||
assign( 'publisher', md.publisher )
|
||||
assign( 'pages', md.pageCount )
|
||||
assign( 'format', md.format )
|
||||
assign( 'language', md.language )
|
||||
assign( 'rating', md.maturityRating )
|
||||
assign( 'price', md.price )
|
||||
assign( 'isVersionOf', md.isVersionOf )
|
||||
assign( 'rights', md.rights )
|
||||
assign( 'identifier', md.identifier )
|
||||
assign( 'lastMark', md.lastMark )
|
||||
assign( 'genre', md.genre ) # TODO repeatable
|
||||
|
||||
if md.characters is not None:
|
||||
char_list = [ c.strip() for c in md.characters.split(',') ]
|
||||
for c in char_list:
|
||||
assign( 'character', c )
|
||||
|
||||
if md.manga is not None and md.manga == "YesAndRightToLeft":
|
||||
assign( 'readingDirection', "rtl")
|
||||
|
||||
date_str = ""
|
||||
if md.year is not None:
|
||||
date_str = str(md.year).zfill(4)
|
||||
if md.month is not None:
|
||||
date_str += "-" + str(md.month).zfill(2)
|
||||
assign( 'date', date_str )
|
||||
|
||||
assign( 'coverImage', md.coverImage )
|
||||
|
||||
# need to specially process the credits, since they are structured differently than CIX
|
||||
credit_writer_list = list()
|
||||
credit_penciller_list = list()
|
||||
credit_inker_list = list()
|
||||
credit_colorist_list = list()
|
||||
credit_letterer_list = list()
|
||||
credit_cover_list = list()
|
||||
credit_editor_list = list()
|
||||
|
||||
# loop thru credits, and build a list for each role that CoMet supports
|
||||
for credit in metadata.credits:
|
||||
|
||||
if credit['role'].lower() in set( self.writer_synonyms ):
|
||||
ET.SubElement(root, 'writer').text = u"{0}".format(credit['person'])
|
||||
|
||||
if credit['role'].lower() in set( self.penciller_synonyms ):
|
||||
ET.SubElement(root, 'penciller').text = u"{0}".format(credit['person'])
|
||||
|
||||
if credit['role'].lower() in set( self.inker_synonyms ):
|
||||
ET.SubElement(root, 'inker').text = u"{0}".format(credit['person'])
|
||||
|
||||
if credit['role'].lower() in set( self.colorist_synonyms ):
|
||||
ET.SubElement(root, 'colorist').text = u"{0}".format(credit['person'])
|
||||
|
||||
if credit['role'].lower() in set( self.letterer_synonyms ):
|
||||
ET.SubElement(root, 'letterer').text = u"{0}".format(credit['person'])
|
||||
|
||||
if credit['role'].lower() in set( self.cover_synonyms ):
|
||||
ET.SubElement(root, 'coverDesigner').text = u"{0}".format(credit['person'])
|
||||
|
||||
if credit['role'].lower() in set( self.editor_synonyms ):
|
||||
ET.SubElement(root, 'editor').text = u"{0}".format(credit['person'])
|
||||
|
||||
|
||||
# self pretty-print
|
||||
self.indent(root)
|
||||
|
||||
# wrap it in an ElementTree instance, and save as XML
|
||||
tree = ET.ElementTree(root)
|
||||
return tree
|
||||
|
||||
|
||||
def convertXMLToMetadata( self, tree ):
|
||||
|
||||
root = tree.getroot()
|
||||
|
||||
if root.tag != 'comet':
|
||||
raise 1
|
||||
return None
|
||||
|
||||
metadata = GenericMetadata()
|
||||
md = metadata
|
||||
|
||||
# Helper function
|
||||
def xlate( tag ):
|
||||
node = root.find( tag )
|
||||
if node is not None:
|
||||
return node.text
|
||||
else:
|
||||
return None
|
||||
|
||||
md.series = xlate( 'series' )
|
||||
md.title = xlate( 'title' )
|
||||
md.issue = xlate( 'issue' )
|
||||
md.volume = xlate( 'volume' )
|
||||
md.comments = xlate( 'description' )
|
||||
md.publisher = xlate( 'publisher' )
|
||||
md.language = xlate( 'language' )
|
||||
md.format = xlate( 'format' )
|
||||
md.pageCount = xlate( 'pages' )
|
||||
md.maturityRating = xlate( 'rating' )
|
||||
md.price = xlate( 'price' )
|
||||
md.isVersionOf = xlate( 'isVersionOf' )
|
||||
md.rights = xlate( 'rights' )
|
||||
md.identifier = xlate( 'identifier' )
|
||||
md.lastMark = xlate( 'lastMark' )
|
||||
md.genre = xlate( 'genre' ) # TODO - repeatable field
|
||||
|
||||
date = xlate( 'date' )
|
||||
if date is not None:
|
||||
parts = date.split('-')
|
||||
if len( parts) > 0:
|
||||
md.year = parts[0]
|
||||
if len( parts) > 1:
|
||||
md.month = parts[1]
|
||||
|
||||
md.coverImage = xlate( 'coverImage' )
|
||||
|
||||
readingDirection = xlate( 'readingDirection' )
|
||||
if readingDirection is not None and readingDirection == "rtl":
|
||||
md.manga = "YesAndRightToLeft"
|
||||
|
||||
# loop for character tags
|
||||
char_list = []
|
||||
for n in root:
|
||||
if n.tag == 'character':
|
||||
char_list.append(n.text.strip())
|
||||
md.characters = utils.listToString( char_list )
|
||||
|
||||
# Now extract the credit info
|
||||
for n in root:
|
||||
if ( n.tag == 'writer' or
|
||||
n.tag == 'penciller' or
|
||||
n.tag == 'inker' or
|
||||
n.tag == 'colorist' or
|
||||
n.tag == 'letterer' or
|
||||
n.tag == 'editor'
|
||||
):
|
||||
metadata.addCredit( n.text.strip(), n.tag.title() )
|
||||
|
||||
if n.tag == 'coverDesigner':
|
||||
metadata.addCredit( n.text.strip(), "Cover" )
|
||||
|
||||
|
||||
metadata.isEmpty = False
|
||||
|
||||
return metadata
|
||||
|
||||
#verify that the string actually contains CoMet data in XML format
|
||||
def validateString( self, string ):
|
||||
try:
|
||||
tree = ET.ElementTree(ET.fromstring( string ))
|
||||
root = tree.getroot()
|
||||
if root.tag != 'comet':
|
||||
raise Exception
|
||||
except:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def writeToExternalFile( self, filename, metadata ):
|
||||
|
||||
tree = self.convertMetadataToXML( self, metadata )
|
||||
#ET.dump(tree)
|
||||
tree.write(filename, encoding='utf-8')
|
||||
|
||||
def readFromExternalFile( self, filename ):
|
||||
|
||||
tree = ET.parse( filename )
|
||||
return self.convertXMLToMetadata( tree )
|
||||
|
||||
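One practical difference between the deleted module above and its replacement further below: the old class pretty-prints its output with the hand-rolled recursive `indent()` helper, while the new `comicapi/comet.py` (and `comicinfoxml.py`) call `xml.etree.ElementTree.indent()`, available in the standard library from Python 3.9 onward. A small reference sketch of the standard-library call:

```python
import xml.etree.ElementTree as ET

root = ET.Element("comet")
ET.SubElement(root, "title").text = "Example"
ET.SubElement(root, "series").text = "Example Series"

ET.indent(root)  # mutates the tree in place, inserting newlines and two-space indents
print(ET.tostring(root, encoding="unicode"))
# <comet>
#   <title>Example</title>
#   <series>Example Series</series>
# </comet>
```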
comicapi/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
__author__ = "dromanin"
comicapi/comet.py (new file, 224 lines)
@@ -0,0 +1,224 @@
|
||||
"""A class to encapsulate CoMet data"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Any
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CoMet:
|
||||
|
||||
writer_synonyms = ["writer", "plotter", "scripter"]
|
||||
penciller_synonyms = ["artist", "penciller", "penciler", "breakdowns"]
|
||||
inker_synonyms = ["inker", "artist", "finishes"]
|
||||
colorist_synonyms = ["colorist", "colourist", "colorer", "colourer"]
|
||||
letterer_synonyms = ["letterer"]
|
||||
cover_synonyms = ["cover", "covers", "coverartist", "cover artist"]
|
||||
editor_synonyms = ["editor"]
|
||||
|
||||
def metadata_from_string(self, string: str) -> GenericMetadata:
|
||||
|
||||
tree = ET.ElementTree(ET.fromstring(string))
|
||||
return self.convert_xml_to_metadata(tree)
|
||||
|
||||
def string_from_metadata(self, metadata: GenericMetadata) -> str:
|
||||
tree = self.convert_metadata_to_xml(metadata)
|
||||
return str(ET.tostring(tree.getroot(), encoding="utf-8", xml_declaration=True).decode("utf-8"))
|
||||
|
||||
def convert_metadata_to_xml(self, metadata: GenericMetadata) -> ET.ElementTree:
|
||||
|
||||
# shorthand for the metadata
|
||||
md = metadata
|
||||
|
||||
# build a tree structure
|
||||
root = ET.Element("comet")
|
||||
root.attrib["xmlns:comet"] = "http://www.denvog.com/comet/"
|
||||
root.attrib["xmlns:xsi"] = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
root.attrib["xsi:schemaLocation"] = "http://www.denvog.com http://www.denvog.com/comet/comet.xsd"
|
||||
|
||||
# helper func
|
||||
def assign(comet_entry: str, md_entry: Any) -> None:
|
||||
if md_entry is not None:
|
||||
ET.SubElement(root, comet_entry).text = str(md_entry)
|
||||
|
||||
# title is mandatory
|
||||
if md.title is None:
|
||||
md.title = ""
|
||||
assign("title", md.title)
|
||||
assign("series", md.series)
|
||||
assign("issue", md.issue) # must be int??
|
||||
assign("volume", md.volume)
|
||||
assign("description", md.comments)
|
||||
assign("publisher", md.publisher)
|
||||
assign("pages", md.page_count)
|
||||
assign("format", md.format)
|
||||
assign("language", md.language)
|
||||
assign("rating", md.maturity_rating)
|
||||
assign("price", md.price)
|
||||
assign("isVersionOf", md.is_version_of)
|
||||
assign("rights", md.rights)
|
||||
assign("identifier", md.identifier)
|
||||
assign("lastMark", md.last_mark)
|
||||
assign("genre", md.genre) # TODO repeatable
|
||||
|
||||
if md.characters is not None:
|
||||
char_list = [c.strip() for c in md.characters.split(",")]
|
||||
for c in char_list:
|
||||
assign("character", c)
|
||||
|
||||
if md.manga is not None and md.manga == "YesAndRightToLeft":
|
||||
assign("readingDirection", "rtl")
|
||||
|
||||
if md.year is not None:
|
||||
date_str = str(md.year).zfill(4)
|
||||
if md.month is not None:
|
||||
date_str += "-" + str(md.month).zfill(2)
|
||||
assign("date", date_str)
|
||||
|
||||
assign("coverImage", md.cover_image)
|
||||
|
||||
# loop thru credits, and build a list for each role that CoMet supports
|
||||
for credit in metadata.credits:
|
||||
|
||||
if credit["role"].lower() in set(self.writer_synonyms):
|
||||
ET.SubElement(root, "writer").text = str(credit["person"])
|
||||
|
||||
if credit["role"].lower() in set(self.penciller_synonyms):
|
||||
ET.SubElement(root, "penciller").text = str(credit["person"])
|
||||
|
||||
if credit["role"].lower() in set(self.inker_synonyms):
|
||||
ET.SubElement(root, "inker").text = str(credit["person"])
|
||||
|
||||
if credit["role"].lower() in set(self.colorist_synonyms):
|
||||
ET.SubElement(root, "colorist").text = str(credit["person"])
|
||||
|
||||
if credit["role"].lower() in set(self.letterer_synonyms):
|
||||
ET.SubElement(root, "letterer").text = str(credit["person"])
|
||||
|
||||
if credit["role"].lower() in set(self.cover_synonyms):
|
||||
ET.SubElement(root, "coverDesigner").text = str(credit["person"])
|
||||
|
||||
if credit["role"].lower() in set(self.editor_synonyms):
|
||||
ET.SubElement(root, "editor").text = str(credit["person"])
|
||||
|
||||
ET.indent(root)
|
||||
|
||||
# wrap it in an ElementTree instance, and save as XML
|
||||
tree = ET.ElementTree(root)
|
||||
return tree
|
||||
|
||||
def convert_xml_to_metadata(self, tree: ET.ElementTree) -> GenericMetadata:
|
||||
|
||||
root = tree.getroot()
|
||||
|
||||
if root.tag != "comet":
|
||||
raise Exception("Not a CoMet file")
|
||||
|
||||
metadata = GenericMetadata()
|
||||
md = metadata
|
||||
|
||||
# Helper function
|
||||
def get(tag: str) -> Any:
|
||||
node = root.find(tag)
|
||||
if node is not None:
|
||||
return node.text
|
||||
return None
|
||||
|
||||
md.series = get("series")
|
||||
md.title = get("title")
|
||||
md.issue = get("issue")
|
||||
md.volume = get("volume")
|
||||
md.comments = get("description")
|
||||
md.publisher = get("publisher")
|
||||
md.language = get("language")
|
||||
md.format = get("format")
|
||||
md.page_count = get("pages")
|
||||
md.maturity_rating = get("rating")
|
||||
md.price = get("price")
|
||||
md.is_version_of = get("isVersionOf")
|
||||
md.rights = get("rights")
|
||||
md.identifier = get("identifier")
|
||||
md.last_mark = get("lastMark")
|
||||
md.genre = get("genre") # TODO - repeatable field
|
||||
|
||||
date = get("date")
|
||||
if date is not None:
|
||||
parts = date.split("-")
|
||||
if len(parts) > 0:
|
||||
md.year = parts[0]
|
||||
if len(parts) > 1:
|
||||
md.month = parts[1]
|
||||
|
||||
md.cover_image = get("coverImage")
|
||||
|
||||
reading_direction = get("readingDirection")
|
||||
if reading_direction is not None and reading_direction == "rtl":
|
||||
md.manga = "YesAndRightToLeft"
|
||||
|
||||
# loop for character tags
|
||||
char_list = []
|
||||
for n in root:
|
||||
if n.tag == "character":
|
||||
char_list.append((n.text or "").strip())
|
||||
md.characters = utils.list_to_string(char_list)
|
||||
|
||||
# Now extract the credit info
|
||||
for n in root:
|
||||
if any(
|
||||
[
|
||||
n.tag == "writer",
|
||||
n.tag == "penciller",
|
||||
n.tag == "inker",
|
||||
n.tag == "colorist",
|
||||
n.tag == "letterer",
|
||||
n.tag == "editor",
|
||||
]
|
||||
):
|
||||
metadata.add_credit((n.text or "").strip(), n.tag.title())
|
||||
|
||||
if n.tag == "coverDesigner":
|
||||
metadata.add_credit((n.text or "").strip(), "Cover")
|
||||
|
||||
metadata.is_empty = False
|
||||
|
||||
return metadata
|
||||
|
||||
# verify that the string actually contains CoMet data in XML format
|
||||
def validate_string(self, string: str) -> bool:
|
||||
try:
|
||||
tree = ET.ElementTree(ET.fromstring(string))
|
||||
root = tree.getroot()
|
||||
if root.tag != "comet":
|
||||
raise Exception
|
||||
except:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def write_to_external_file(self, filename: str, metadata: GenericMetadata) -> None:
|
||||
|
||||
tree = self.convert_metadata_to_xml(metadata)
|
||||
tree.write(filename, encoding="utf-8")
|
||||
|
||||
def read_from_external_file(self, filename: str) -> GenericMetadata:
|
||||
|
||||
tree = ET.parse(filename)
|
||||
return self.convert_xml_to_metadata(tree)
|
||||
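A round-trip sketch for the rewritten `CoMet` class above, assuming the `comicapi` package from this commit is importable; the field values are made up for illustration:

```python
import tempfile
from pathlib import Path

from comicapi.comet import CoMet
from comicapi.genericmetadata import GenericMetadata

md = GenericMetadata()
md.series = "Example Series"
md.issue = "1"
md.title = "An Example Issue"
md.publisher = "Example Press"
md.year = 2022
md.month = 3
md.add_credit("A. Writer", "Writer")
md.add_credit("A. Penciller", "Penciller")
md.is_empty = False

comet = CoMet()
print(comet.string_from_metadata(md))  # CoMet XML: one element per assigned field and credit

with tempfile.TemporaryDirectory() as tmp:
    path = str(Path(tmp) / "comet.xml")
    comet.write_to_external_file(path, md)
    round_trip = comet.read_from_external_file(path)

print(round_trip.series, round_trip.issue)      # Example Series 1
print([c["role"] for c in round_trip.credits])  # ['Writer', 'Penciller']
```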
comicapi/comicarchive.py (new file, 1194 lines; diff not shown here)
comicapi/comicbookinfo.py (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
"""A class to encapsulate the ComicBookInfo data"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from typing import Any, Literal, TypedDict, Union
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
CBILiteralType = Literal[
|
||||
"series",
|
||||
"title",
|
||||
"issue",
|
||||
"publisher",
|
||||
"publicationMonth",
|
||||
"publicationYear",
|
||||
"numberOfIssues",
|
||||
"comments",
|
||||
"genre",
|
||||
"volume",
|
||||
"numberOfVolumes",
|
||||
"language",
|
||||
"country",
|
||||
"rating",
|
||||
"credits",
|
||||
"tags",
|
||||
]
|
||||
|
||||
|
||||
class Credits(TypedDict):
|
||||
person: str
|
||||
role: str
|
||||
primary: bool
|
||||
|
||||
|
||||
class ComicBookInfoJson(TypedDict, total=False):
|
||||
series: str
|
||||
title: str
|
||||
publisher: str
|
||||
publicationMonth: int
|
||||
publicationYear: int
|
||||
issue: int
|
||||
numberOfIssues: int
|
||||
volume: int
|
||||
numberOfVolumes: int
|
||||
rating: int
|
||||
genre: str
|
||||
language: str
|
||||
country: str
|
||||
credits: list[Credits]
|
||||
tags: list[str]
|
||||
comments: str
|
||||
|
||||
|
||||
CBIContainer = TypedDict("CBIContainer", {"appID": str, "lastModified": str, "ComicBookInfo/1.0": ComicBookInfoJson})
|
||||
|
||||
|
||||
class ComicBookInfo:
|
||||
def metadata_from_string(self, string: str) -> GenericMetadata:
|
||||
|
||||
cbi_container = json.loads(string)
|
||||
|
||||
metadata = GenericMetadata()
|
||||
|
||||
cbi = defaultdict(lambda: None, cbi_container["ComicBookInfo/1.0"])
|
||||
|
||||
metadata.series = utils.xlate(cbi["series"])
|
||||
metadata.title = utils.xlate(cbi["title"])
|
||||
metadata.issue = utils.xlate(cbi["issue"])
|
||||
metadata.publisher = utils.xlate(cbi["publisher"])
|
||||
metadata.month = utils.xlate(cbi["publicationMonth"], True)
|
||||
metadata.year = utils.xlate(cbi["publicationYear"], True)
|
||||
metadata.issue_count = utils.xlate(cbi["numberOfIssues"], True)
|
||||
metadata.comments = utils.xlate(cbi["comments"])
|
||||
metadata.genre = utils.xlate(cbi["genre"])
|
||||
metadata.volume = utils.xlate(cbi["volume"], True)
|
||||
metadata.volume_count = utils.xlate(cbi["numberOfVolumes"], True)
|
||||
metadata.language = utils.xlate(cbi["language"])
|
||||
metadata.country = utils.xlate(cbi["country"])
|
||||
metadata.critical_rating = utils.xlate(cbi["rating"])
|
||||
|
||||
metadata.credits = cbi["credits"]
|
||||
metadata.tags = cbi["tags"]
|
||||
|
||||
# make sure credits and tags are at least empty lists and not None
|
||||
if metadata.credits is None:
|
||||
metadata.credits = []
|
||||
if metadata.tags is None:
|
||||
metadata.tags = []
|
||||
|
||||
# need the language string to be ISO
|
||||
if metadata.language is not None:
|
||||
metadata.language = utils.get_language(metadata.language)
|
||||
|
||||
metadata.is_empty = False
|
||||
|
||||
return metadata
|
||||
|
||||
def string_from_metadata(self, metadata: GenericMetadata) -> str:
|
||||
|
||||
cbi_container = self.create_json_dictionary(metadata)
|
||||
return json.dumps(cbi_container)
|
||||
|
||||
def validate_string(self, string: Union[bytes, str]) -> bool:
|
||||
"""Verify that the string actually contains CBI data in JSON format"""
|
||||
|
||||
try:
|
||||
cbi_container = json.loads(string)
|
||||
except:
|
||||
return False
|
||||
|
||||
return "ComicBookInfo/1.0" in cbi_container
|
||||
|
||||
def create_json_dictionary(self, metadata: GenericMetadata) -> CBIContainer:
|
||||
"""Create the dictionary that we will convert to JSON text"""
|
||||
|
||||
cbi_container = CBIContainer(
|
||||
{
|
||||
"appID": "ComicTagger/" + "1.0.0",
|
||||
"lastModified": str(datetime.now()),
|
||||
"ComicBookInfo/1.0": {},
|
||||
}
|
||||
) # TODO: ctversion.version,
|
||||
|
||||
# helper func
|
||||
def assign(cbi_entry: CBILiteralType, md_entry: Any) -> None:
|
||||
if md_entry is not None or isinstance(md_entry, str) and md_entry != "":
|
||||
cbi_container["ComicBookInfo/1.0"][cbi_entry] = md_entry
|
||||
|
||||
assign("series", utils.xlate(metadata.series))
|
||||
assign("title", utils.xlate(metadata.title))
|
||||
assign("issue", utils.xlate(metadata.issue))
|
||||
assign("publisher", utils.xlate(metadata.publisher))
|
||||
assign("publicationMonth", utils.xlate(metadata.month, True))
|
||||
assign("publicationYear", utils.xlate(metadata.year, True))
|
||||
assign("numberOfIssues", utils.xlate(metadata.issue_count, True))
|
||||
assign("comments", utils.xlate(metadata.comments))
|
||||
assign("genre", utils.xlate(metadata.genre))
|
||||
assign("volume", utils.xlate(metadata.volume, True))
|
||||
assign("numberOfVolumes", utils.xlate(metadata.volume_count, True))
|
||||
assign("language", utils.xlate(utils.get_language_from_iso(metadata.language)))
|
||||
assign("country", utils.xlate(metadata.country))
|
||||
assign("rating", utils.xlate(metadata.critical_rating))
|
||||
assign("credits", metadata.credits)
|
||||
assign("tags", metadata.tags)
|
||||
|
||||
return cbi_container
|
||||
|
||||
def write_to_external_file(self, filename: str, metadata: GenericMetadata) -> None:
|
||||
|
||||
cbi_container = self.create_json_dictionary(metadata)
|
||||
|
||||
with open(filename, "w", encoding="utf-8") as f:
|
||||
f.write(json.dumps(cbi_container, indent=4))
|
||||
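A short sketch of producing, validating and re-parsing a ComicBookInfo blob with the class above, again assuming the `comicapi` package from this commit is importable; the values are illustrative:

```python
import json

from comicapi.comicbookinfo import ComicBookInfo
from comicapi.genericmetadata import GenericMetadata

md = GenericMetadata()
md.series = "Example Series"
md.issue = "2"
md.year = 2021
md.month = 7
md.add_credit("A. Writer", "Writer")
md.is_empty = False

cbi = ComicBookInfo()
blob = cbi.string_from_metadata(md)

print(cbi.validate_string(blob))                        # True: the "ComicBookInfo/1.0" key is present
print(json.loads(blob)["ComicBookInfo/1.0"]["series"])  # Example Series

parsed = cbi.metadata_from_string(blob)
print(parsed.series, parsed.issue, parsed.year)         # Example Series 2 2021
```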
comicapi/comicinfoxml.py (new file, 278 lines)
@@ -0,0 +1,278 @@
|
||||
"""A class to encapsulate ComicRack's ComicInfo.xml data"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import xml.etree.ElementTree as ET
|
||||
from collections import OrderedDict
|
||||
from typing import Any, List, Optional, cast
|
||||
from xml.etree.ElementTree import ElementTree
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.genericmetadata import GenericMetadata, ImageMetadata
|
||||
from comicapi.issuestring import IssueString
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ComicInfoXml:
|
||||
|
||||
writer_synonyms = ["writer", "plotter", "scripter"]
|
||||
penciller_synonyms = ["artist", "penciller", "penciler", "breakdowns"]
|
||||
inker_synonyms = ["inker", "artist", "finishes"]
|
||||
colorist_synonyms = ["colorist", "colourist", "colorer", "colourer"]
|
||||
letterer_synonyms = ["letterer"]
|
||||
cover_synonyms = ["cover", "covers", "coverartist", "cover artist"]
|
||||
editor_synonyms = ["editor"]
|
||||
|
||||
def get_parseable_credits(self) -> List[str]:
|
||||
parsable_credits = []
|
||||
parsable_credits.extend(self.writer_synonyms)
|
||||
parsable_credits.extend(self.penciller_synonyms)
|
||||
parsable_credits.extend(self.inker_synonyms)
|
||||
parsable_credits.extend(self.colorist_synonyms)
|
||||
parsable_credits.extend(self.letterer_synonyms)
|
||||
parsable_credits.extend(self.cover_synonyms)
|
||||
parsable_credits.extend(self.editor_synonyms)
|
||||
return parsable_credits
|
||||
|
||||
def metadata_from_string(self, string: bytes) -> GenericMetadata:
|
||||
|
||||
tree = ET.ElementTree(ET.fromstring(string))
|
||||
return self.convert_xml_to_metadata(tree)
|
||||
|
||||
def string_from_metadata(self, metadata: GenericMetadata, xml: bytes = b"") -> str:
|
||||
tree = self.convert_metadata_to_xml(self, metadata, xml)
|
||||
tree_str = ET.tostring(tree.getroot(), encoding="utf-8", xml_declaration=True).decode("utf-8")
|
||||
return str(tree_str)
|
||||
|
||||
def convert_metadata_to_xml(
|
||||
self, filename: "ComicInfoXml", metadata: GenericMetadata, xml: bytes = b""
|
||||
) -> ElementTree:
|
||||
|
||||
# shorthand for the metadata
|
||||
md = metadata
|
||||
|
||||
if xml:
|
||||
root = ET.ElementTree(ET.fromstring(xml)).getroot()
|
||||
else:
|
||||
# build a tree structure
|
||||
root = ET.Element("ComicInfo")
|
||||
root.attrib["xmlns:xsi"] = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
root.attrib["xmlns:xsd"] = "http://www.w3.org/2001/XMLSchema"
|
||||
# helper func
|
||||
|
||||
def assign(cix_entry: str, md_entry: Any) -> None:
|
||||
if md_entry is not None and md_entry:
|
||||
et_entry = root.find(cix_entry)
|
||||
if et_entry is not None:
|
||||
et_entry.text = str(md_entry)
|
||||
else:
|
||||
ET.SubElement(root, cix_entry).text = str(md_entry)
|
||||
else:
|
||||
et_entry = root.find(cix_entry)
|
||||
if et_entry is not None:
|
||||
root.remove(et_entry)
|
||||
|
||||
assign("Title", md.title)
|
||||
assign("Series", md.series)
|
||||
assign("Number", md.issue)
|
||||
assign("Count", md.issue_count)
|
||||
assign("Volume", md.volume)
|
||||
assign("AlternateSeries", md.alternate_series)
|
||||
assign("AlternateNumber", md.alternate_number)
|
||||
assign("StoryArc", md.story_arc)
|
||||
assign("SeriesGroup", md.series_group)
|
||||
assign("AlternateCount", md.alternate_count)
|
||||
assign("Summary", md.comments)
|
||||
assign("Notes", md.notes)
|
||||
assign("Year", md.year)
|
||||
assign("Month", md.month)
|
||||
assign("Day", md.day)
|
||||
|
||||
# need to specially process the credits, since they are structured
|
||||
# differently than CIX
|
||||
credit_writer_list = []
|
||||
credit_penciller_list = []
|
||||
credit_inker_list = []
|
||||
credit_colorist_list = []
|
||||
credit_letterer_list = []
|
||||
credit_cover_list = []
|
||||
credit_editor_list = []
|
||||
|
||||
# first, loop thru credits, and build a list for each role that CIX
|
||||
# supports
|
||||
for credit in metadata.credits:
|
||||
|
||||
if credit["role"].lower() in set(self.writer_synonyms):
|
||||
credit_writer_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
if credit["role"].lower() in set(self.penciller_synonyms):
|
||||
credit_penciller_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
if credit["role"].lower() in set(self.inker_synonyms):
|
||||
credit_inker_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
if credit["role"].lower() in set(self.colorist_synonyms):
|
||||
credit_colorist_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
if credit["role"].lower() in set(self.letterer_synonyms):
|
||||
credit_letterer_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
if credit["role"].lower() in set(self.cover_synonyms):
|
||||
credit_cover_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
if credit["role"].lower() in set(self.editor_synonyms):
|
||||
credit_editor_list.append(credit["person"].replace(",", ""))
|
||||
|
||||
# second, convert each list to string, and add to XML struct
|
||||
assign("Writer", utils.list_to_string(credit_writer_list))
|
||||
|
||||
assign("Penciller", utils.list_to_string(credit_penciller_list))
|
||||
|
||||
assign("Inker", utils.list_to_string(credit_inker_list))
|
||||
|
||||
assign("Colorist", utils.list_to_string(credit_colorist_list))
|
||||
|
||||
assign("Letterer", utils.list_to_string(credit_letterer_list))
|
||||
|
||||
assign("CoverArtist", utils.list_to_string(credit_cover_list))
|
||||
|
||||
assign("Editor", utils.list_to_string(credit_editor_list))
|
||||
|
||||
assign("Publisher", md.publisher)
|
||||
assign("Imprint", md.imprint)
|
||||
assign("Genre", md.genre)
|
||||
assign("Web", md.web_link)
|
||||
assign("PageCount", md.page_count)
|
||||
assign("LanguageISO", md.language)
|
||||
assign("Format", md.format)
|
||||
assign("AgeRating", md.maturity_rating)
|
||||
assign("CommunityRating", md.community_rating)
|
||||
assign("BlackAndWhite", "Yes" if md.black_and_white else None)
|
||||
assign("Manga", md.manga)
|
||||
assign("Characters", md.characters)
|
||||
assign("Teams", md.teams)
|
||||
assign("Locations", md.locations)
|
||||
assign("ScanInformation", md.scan_info)
|
||||
|
||||
# loop and add the page entries under pages node
|
||||
pages_node = root.find("Pages")
|
||||
if pages_node is not None:
|
||||
pages_node.clear()
|
||||
else:
|
||||
pages_node = ET.SubElement(root, "Pages")
|
||||
|
||||
for page_dict in md.pages:
|
||||
page_node = ET.SubElement(pages_node, "Page")
|
||||
page_node.attrib = OrderedDict(sorted((k, str(v)) for k, v in page_dict.items()))
|
||||
|
||||
ET.indent(root)
|
||||
|
||||
# wrap it in an ElementTree instance, and save as XML
|
||||
tree = ET.ElementTree(root)
|
||||
return tree
|
||||
|
||||
def convert_xml_to_metadata(self, tree: ElementTree) -> GenericMetadata:
|
||||
|
||||
root = tree.getroot()
|
||||
|
||||
if root.tag != "ComicInfo":
|
||||
raise Exception("Not a ComicInfo file")
|
||||
|
||||
def get(name: str) -> Optional[str]:
|
||||
tag = root.find(name)
|
||||
if tag is None:
|
||||
return None
|
||||
return tag.text
|
||||
|
||||
md = GenericMetadata()
|
||||
|
||||
md.series = utils.xlate(get("Series"))
|
||||
md.title = utils.xlate(get("Title"))
|
||||
md.issue = IssueString(utils.xlate(get("Number"))).as_string()
|
||||
md.issue_count = utils.xlate(get("Count"), True)
|
||||
md.volume = utils.xlate(get("Volume"), True)
|
||||
md.alternate_series = utils.xlate(get("AlternateSeries"))
|
||||
md.alternate_number = IssueString(utils.xlate(get("AlternateNumber"))).as_string()
|
||||
md.alternate_count = utils.xlate(get("AlternateCount"), True)
|
||||
md.comments = utils.xlate(get("Summary"))
|
||||
md.notes = utils.xlate(get("Notes"))
|
||||
md.year = utils.xlate(get("Year"), True)
|
||||
md.month = utils.xlate(get("Month"), True)
|
||||
md.day = utils.xlate(get("Day"), True)
|
||||
md.publisher = utils.xlate(get("Publisher"))
|
||||
md.imprint = utils.xlate(get("Imprint"))
|
||||
md.genre = utils.xlate(get("Genre"))
|
||||
md.web_link = utils.xlate(get("Web"))
|
||||
md.language = utils.xlate(get("LanguageISO"))
|
||||
md.format = utils.xlate(get("Format"))
|
||||
md.manga = utils.xlate(get("Manga"))
|
||||
md.characters = utils.xlate(get("Characters"))
|
||||
md.teams = utils.xlate(get("Teams"))
|
||||
md.locations = utils.xlate(get("Locations"))
|
||||
md.page_count = utils.xlate(get("PageCount"), True)
|
||||
md.scan_info = utils.xlate(get("ScanInformation"))
|
||||
md.story_arc = utils.xlate(get("StoryArc"))
|
||||
md.series_group = utils.xlate(get("SeriesGroup"))
|
||||
md.maturity_rating = utils.xlate(get("AgeRating"))
|
||||
md.community_rating = utils.xlate(get("CommunityRating"))
|
||||
|
||||
tmp = utils.xlate(get("BlackAndWhite"))
|
||||
if tmp is not None and tmp.lower() in ["yes", "true", "1"]:
|
||||
md.black_and_white = True
|
||||
# Now extract the credit info
|
||||
for n in root:
|
||||
if any(
|
||||
[
|
||||
n.tag == "Writer",
|
||||
n.tag == "Penciller",
|
||||
n.tag == "Inker",
|
||||
n.tag == "Colorist",
|
||||
n.tag == "Letterer",
|
||||
n.tag == "Editor",
|
||||
]
|
||||
):
|
||||
if n.text is not None:
|
||||
for name in n.text.split(","):
|
||||
md.add_credit(name.strip(), n.tag)
|
||||
|
||||
if n.tag == "CoverArtist":
|
||||
if n.text is not None:
|
||||
for name in n.text.split(","):
|
||||
md.add_credit(name.strip(), "Cover")
|
||||
|
||||
# parse page data now
|
||||
pages_node = root.find("Pages")
|
||||
if pages_node is not None:
|
||||
for page in pages_node:
|
||||
p: dict[str, Any] = page.attrib
|
||||
if "Image" in p:
|
||||
p["Image"] = int(p["Image"])
|
||||
md.pages.append(cast(ImageMetadata, p))
|
||||
|
||||
md.is_empty = False
|
||||
|
||||
return md
|
||||
|
||||
def write_to_external_file(self, filename: str, metadata: GenericMetadata, xml: bytes = b"") -> None:
|
||||
|
||||
tree = self.convert_metadata_to_xml(self, metadata, xml)
|
||||
tree.write(filename, encoding="utf-8", xml_declaration=True)
|
||||
|
||||
def read_from_external_file(self, filename: str) -> GenericMetadata:
|
||||
|
||||
tree = ET.parse(filename)
|
||||
return self.convert_xml_to_metadata(tree)
|
||||
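A sketch of the ComicInfo.xml round trip with the class above. Note that `string_from_metadata()` returns a `str` beginning with an XML declaration, while `metadata_from_string()` is typed to take `bytes`, so the string is encoded before being parsed back (assumes the `comicapi` package from this commit is importable):

```python
from comicapi.comicinfoxml import ComicInfoXml
from comicapi.genericmetadata import GenericMetadata

md = GenericMetadata()
md.series = "Example Series"
md.issue = "3"
md.title = "Example Title"
md.year = 2020
md.add_credit("A. Writer", "Writer")
md.add_credit("A. Colorist", "Colorist")
md.is_empty = False

cix = ComicInfoXml()
xml_text = cix.string_from_metadata(md)          # str, starts with an XML declaration
parsed = cix.metadata_from_string(xml_text.encode("utf-8"))

print(parsed.series, parsed.issue, parsed.year)  # Example Series 3 2020
print([c["person"] for c in parsed.credits])     # ['A. Writer', 'A. Colorist']
```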
comicapi/data/publishers.json (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
{
|
||||
"Marvel":{
|
||||
"marvel comics": "",
|
||||
"aircel comics": "Aircel Comics",
|
||||
"aircel": "Aircel Comics",
|
||||
"atlas comics": "Atlas Comics",
|
||||
"atlas": "Atlas Comics",
|
||||
"crossgen comics": "CrossGen comics",
|
||||
"crossgen": "CrossGen comics",
|
||||
"curtis magazines": "Curtis Magazines",
|
||||
"disney books group": "Disney Books Group",
|
||||
"disney books": "Disney Books Group",
|
||||
"disney kingdoms": "Disney Kingdoms",
|
||||
"epic comics group": "Epic Comics",
|
||||
"epic comics": "Epic Comics",
|
||||
"epic": "Epic Comics",
|
||||
"eternity comics": "Eternity Comics",
|
||||
"humorama": "Humorama",
|
||||
"icon comics": "Icon Comics",
|
||||
"infinite comics": "Infinite Comics",
|
||||
"malibu comics": "Malibu Comics",
|
||||
"malibu": "Malibu Comics",
|
||||
"marvel 2099": "Marvel 2099",
|
||||
"marvel absurd": "Marvel Absurd",
|
||||
"marvel adventures": "Marvel Adventures",
|
||||
"marvel age": "Marvel Age",
|
||||
"marvel books": "Marvel Books",
|
||||
"marvel comics 2": "Marvel Comics 2",
|
||||
"marvel digital comics unlimited": "Marvel Unlimited",
|
||||
"marvel edge": "Marvel Edge",
|
||||
"marvel frontier": "Marvel Frontier",
|
||||
"marvel illustrated": "Marvel Illustrated",
|
||||
"marvel knights": "Marvel Knights",
|
||||
"marvel magazine group": "Marvel Magazine Group",
|
||||
"marvel mangaverse": "Marvel Mangaverse",
|
||||
"marvel monsters group": "Marvel Monsters Group",
|
||||
"marvel music": "Marvel Music",
|
||||
"marvel next": "Marvel Next",
|
||||
"marvel noir": "Marvel Noir",
|
||||
"marvel press": "Marvel Press",
|
||||
"marvel uk": "Marvel UK",
|
||||
"marvel unlimited": "Marvel Unlimited",
|
||||
"max": "MAX",
|
||||
"mc2": "Marvel Comics 2",
|
||||
"new universe": "New Universe",
|
||||
"non-pareil publishing corp.": "Non-Pareil Publishing Corp.",
|
||||
"paramount comics": "Paramount Comics",
|
||||
"power comics": "Power Comics",
|
||||
"razorline": "Razorline",
|
||||
"star comics": "Star Comics",
|
||||
"timely comics": "Timely Comics",
|
||||
"timely": "Timely Comics",
|
||||
"tsunami": "Tsunami",
|
||||
"ultimate comics": "Ultimate Comics",
|
||||
"ultimate marvel": "Ultimate Marvel",
|
||||
"vital publications, inc.": "Vital Publications, Inc."
|
||||
},
|
||||
|
||||
"DC Comics":{
|
||||
"dc_comics": "",
|
||||
"dc": "",
|
||||
"dccomics": "",
|
||||
"!mpact comics": "Impact Comics",
|
||||
"all star dc": "All-Star",
|
||||
"all star": "All-Star",
|
||||
"all-star dc": "All-Star",
|
||||
"all-star": "All-Star",
|
||||
"america's best comics": "America's Best Comics",
|
||||
"black label": "DC Black Label",
|
||||
"cliffhanger": "Cliffhanger",
|
||||
"cmx manga": "CMX Manga",
|
||||
"dc black label": "DC Black Label",
|
||||
"dc focus": "DC Focus",
|
||||
"dc ink": "DC Ink",
|
||||
"dc zoom": "DC Zoom",
|
||||
"earth m": "Earth M",
|
||||
"earth one": "Earth One",
|
||||
"earth-m": "Earth M",
|
||||
"elseworlds": "Elseworlds",
|
||||
"eo": "Earth One",
|
||||
"first wave": "First Wave",
|
||||
"focus": "DC Focus",
|
||||
"helix": "Helix",
|
||||
"homage comics": "Homage Comics",
|
||||
"impact comics": "Impact Comics",
|
||||
"impact! comics": "Impact Comics",
|
||||
"johnny dc": "Johnny DC",
|
||||
"mad": "Mad",
|
||||
"minx": "Minx",
|
||||
"paradox press": "Paradox Press",
|
||||
"piranha press": "Piranha Press",
|
||||
"sandman universe": "Sandman Universe",
|
||||
"tangent comics": "Tangent Comics",
|
||||
"tsr": "TSR",
|
||||
"vertigo": "Vertigo",
|
||||
"wildstorm productions": "WildStorm Productions",
|
||||
"wildstorm signature": "WildStorm Productions",
|
||||
"wildstorm": "WildStorm Productions",
|
||||
"wonder comics": "Wonder Comics",
|
||||
"young animal": "Young Animal",
|
||||
"zuda comics": "Zuda Comics",
|
||||
"zuda": "Zuda Comics"
|
||||
},
|
||||
|
||||
"Dark Horse Comics":{
|
||||
"berger books": "Berger Books",
|
||||
"comics' greatest world": "Dark Horse Heroes",
|
||||
"dark horse digital": "Dark Horse Digital",
|
||||
"dark horse heroes": "Dark Horse Heroes",
|
||||
"dark horse manga": "Dark Horse Manga",
|
||||
"dh deluxe": "DH Deluxe",
|
||||
"dh press": "DH Press",
|
||||
"kitchen sink books": "Kitchen Sink Books",
|
||||
"legend": "Legend",
|
||||
"m press": "M Press",
|
||||
"maverick": "Maverick"
|
||||
},
|
||||
|
||||
"Archie Comics":{
|
||||
"archie action": "Archie Action",
|
||||
"archie adventure Series": "Archie Adventure Series",
|
||||
"archie horror": "Archie Horror",
|
||||
"dark circle Comics": "Dark Circle Comics",
|
||||
"dark circle": "Dark Circle Comics",
|
||||
"mighty comics Group": "Mighty Comics Group",
|
||||
"radio comics": "Mighty Comics Group",
|
||||
"red circle Comics": "Dark Circle Comics",
|
||||
"red circle": "Dark Circle Comics"
|
||||
}
|
||||
}
|
||||
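The JSON above maps each top-level publisher to a table of lower-cased imprint aliases, where an empty string means "no separate imprint, file it under the publisher itself". Purely as an illustration of how such a table could be consumed; the `resolve_imprint` helper below is hypothetical and is not the project's own lookup code:

```python
import json
from pathlib import Path


def resolve_imprint(publishers_file: Path, publisher: str, imprint: str) -> tuple[str, str]:
    """Return (canonical_imprint, top_level_publisher) for a raw imprint string."""
    data = json.loads(publishers_file.read_text(encoding="utf-8"))
    aliases = data.get(publisher, {})
    canonical = aliases.get(imprint.strip().casefold(), imprint)
    # An empty string in the table means "no separate imprint, use the publisher".
    return (canonical or publisher, publisher)


print(resolve_imprint(Path("comicapi/data/publishers.json"), "DC Comics", "Vertigo"))
# ('Vertigo', 'DC Comics')
```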
comicapi/filenamelexer.py (new file, 348 lines)
@@ -0,0 +1,348 @@
|
||||
import calendar
|
||||
import os
|
||||
import unicodedata
|
||||
from enum import Enum, auto
|
||||
from typing import Any, Callable, Optional, Set
|
||||
|
||||
|
||||
class ItemType(Enum):
|
||||
Error = auto() # Error occurred; value is text of error
|
||||
EOF = auto()
|
||||
Text = auto() # Text
|
||||
LeftParen = auto() # '(' inside action
|
||||
Number = auto() # Simple number
|
||||
IssueNumber = auto() # Preceded by a # Symbol
|
||||
RightParen = auto() # ')' inside action
|
||||
Space = auto() # Run of spaces separating arguments
|
||||
Dot = auto()
|
||||
LeftBrace = auto()
|
||||
RightBrace = auto()
|
||||
LeftSBrace = auto()
|
||||
RightSBrace = auto()
|
||||
Symbol = auto()
|
||||
Skip = auto() # __ or -- no title, issue or series information beyond
|
||||
Operator = auto()
|
||||
Calendar = auto()
|
||||
InfoSpecifier = auto() # Specifies type of info e.g. v1 for 'volume': 1
|
||||
ArchiveType = auto()
|
||||
Honorific = auto()
|
||||
Keywords = auto()
|
||||
FCBD = auto()
|
||||
ComicType = auto()
|
||||
Publisher = auto()
|
||||
C2C = auto()
|
||||
|
||||
|
||||
braces = [
|
||||
ItemType.LeftBrace,
|
||||
ItemType.LeftParen,
|
||||
ItemType.LeftSBrace,
|
||||
ItemType.RightBrace,
|
||||
ItemType.RightParen,
|
||||
ItemType.RightSBrace,
|
||||
]
|
||||
|
||||
eof = chr(0)
|
||||
|
||||
key = {
|
||||
"fcbd": ItemType.FCBD,
|
||||
"freecomicbookday": ItemType.FCBD,
|
||||
"cbr": ItemType.ArchiveType,
|
||||
"cbz": ItemType.ArchiveType,
|
||||
"cbt": ItemType.ArchiveType,
|
||||
"cb7": ItemType.ArchiveType,
|
||||
"rar": ItemType.ArchiveType,
|
||||
"zip": ItemType.ArchiveType,
|
||||
"tar": ItemType.ArchiveType,
|
||||
"7z": ItemType.ArchiveType,
|
||||
"annual": ItemType.ComicType,
|
||||
"book": ItemType.ComicType,
|
||||
"volume": ItemType.InfoSpecifier,
|
||||
"vol.": ItemType.InfoSpecifier,
|
||||
"vol": ItemType.InfoSpecifier,
|
||||
"v": ItemType.InfoSpecifier,
|
||||
"of": ItemType.InfoSpecifier,
|
||||
"dc": ItemType.Publisher,
|
||||
"marvel": ItemType.Publisher,
|
||||
"covers": ItemType.InfoSpecifier,
|
||||
"c2c": ItemType.C2C,
|
||||
"mr": ItemType.Honorific,
|
||||
"ms": ItemType.Honorific,
|
||||
"mrs": ItemType.Honorific,
|
||||
"dr": ItemType.Honorific,
|
||||
}
|
||||
|
||||
|
||||
class Item:
|
||||
def __init__(self, typ: ItemType, pos: int, val: str) -> None:
|
||||
self.typ: ItemType = typ
|
||||
self.pos: int = pos
|
||||
self.val: str = val
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"{self.val}: index: {self.pos}: {self.typ}"
|
||||
|
||||
|
||||
class Lexer:
|
||||
def __init__(self, string: str) -> None:
|
||||
self.input: str = string # The string being scanned
|
||||
self.state: Optional[Callable[[Lexer], Optional[Callable]]] = None # The next lexing function to enter
|
||||
self.pos: int = -1 # Current position in the input
|
||||
self.start: int = 0 # Start position of this item
|
||||
self.lastPos: int = 0 # Position of most recent item returned by nextItem
|
||||
self.paren_depth: int = 0 # Nesting depth of ( ) exprs
|
||||
self.brace_depth: int = 0 # Nesting depth of { }
|
||||
self.sbrace_depth: int = 0 # Nesting depth of [ ]
|
||||
self.items: list[Item] = []
|
||||
|
||||
# Next returns the next rune in the input.
|
||||
def get(self) -> str:
|
||||
if int(self.pos) >= len(self.input) - 1:
|
||||
self.pos += 1
|
||||
return eof
|
||||
|
||||
self.pos += 1
|
||||
return self.input[self.pos]
|
||||
|
||||
# Peek returns but does not consume the next rune in the input.
|
||||
def peek(self) -> str:
|
||||
if int(self.pos) >= len(self.input) - 1:
|
||||
return eof
|
||||
|
||||
return self.input[self.pos + 1]
|
||||
|
||||
def backup(self) -> None:
|
||||
self.pos -= 1
|
||||
|
||||
# Emit passes an item back to the client.
|
||||
def emit(self, t: ItemType) -> None:
|
||||
self.items.append(Item(t, self.start, self.input[self.start : self.pos + 1]))
|
||||
self.start = self.pos + 1
|
||||
|
||||
# Ignore skips over the pending input before this point.
|
||||
def ignore(self) -> None:
|
||||
self.start = self.pos
|
||||
|
||||
# Accept consumes the next rune if it's from the valid set.
|
||||
def accept(self, valid: str) -> bool:
|
||||
if self.get() in valid:
|
||||
return True
|
||||
|
||||
self.backup()
|
||||
return False
|
||||
|
||||
# AcceptRun consumes a run of runes from the valid set.
|
||||
def accept_run(self, valid: str) -> None:
|
||||
while self.get() in valid:
|
||||
pass
|
||||
|
||||
self.backup()
|
||||
|
||||
def scan_number(self) -> bool:
|
||||
digits = "0123456789"
|
||||
|
||||
self.accept_run(digits)
|
||||
if self.accept("."):
|
||||
if self.accept(digits):
|
||||
self.accept_run(digits)
|
||||
else:
|
||||
self.backup()
|
||||
if self.accept("s"):
|
||||
if not self.accept("t"):
|
||||
self.backup()
|
||||
elif self.accept("nr"):
|
||||
if not self.accept("d"):
|
||||
self.backup()
|
||||
elif self.accept("t"):
|
||||
if not self.accept("h"):
|
||||
self.backup()
|
||||
|
||||
return True
|
||||
|
||||
# Runs the state machine for the lexer.
|
||||
def run(self) -> None:
|
||||
self.state = lex_filename
|
||||
while self.state is not None:
|
||||
self.state = self.state(self)
|
||||
|
||||
|
||||
# Errorf returns an error token and terminates the scan by passing
# back None as the next state, terminating self.nextItem.
|
||||
def errorf(lex: Lexer, message: str) -> Optional[Callable[[Lexer], Optional[Callable]]]:
|
||||
lex.items.append(Item(ItemType.Error, lex.start, message))
|
||||
return None
|
||||
|
||||
|
||||
# Scans the elements inside action delimiters.
|
||||
def lex_filename(lex: Lexer) -> Optional[Callable[[Lexer], Optional[Callable]]]:
|
||||
r = lex.get()
|
||||
if r == eof:
|
||||
if lex.paren_depth != 0:
|
||||
return errorf(lex, "unclosed left paren")
|
||||
|
||||
if lex.brace_depth != 0:
|
||||
return errorf(lex, "unclosed left brace")
|
||||
lex.emit(ItemType.EOF)
|
||||
return None
|
||||
elif is_space(r):
|
||||
if r == "_" and lex.peek() == "_":
|
||||
lex.get()
|
||||
lex.emit(ItemType.Skip)
|
||||
else:
|
||||
return lex_space
|
||||
elif r == ".":
|
||||
r = lex.peek()
|
||||
if r < "0" or "9" < r:
|
||||
lex.emit(ItemType.Dot)
|
||||
return lex_filename
|
||||
|
||||
lex.backup()
|
||||
return lex_number
|
||||
elif r == "'":
|
||||
r = lex.peek()
|
||||
if r in "0123456789":
|
||||
return lex_number
|
||||
lex.emit(ItemType.Text) # TODO: Change to Text
|
||||
elif "0" <= r <= "9":
|
||||
lex.backup()
|
||||
return lex_number
|
||||
elif r == "#":
|
||||
if "0" <= lex.peek() <= "9":
|
||||
return lex_number
|
||||
lex.emit(ItemType.Symbol)
|
||||
elif is_operator(r):
|
||||
if r == "-" and lex.peek() == "-":
|
||||
lex.get()
|
||||
lex.emit(ItemType.Skip)
|
||||
else:
|
||||
return lex_operator
|
||||
elif is_alpha_numeric(r):
|
||||
lex.backup()
|
||||
return lex_text
|
||||
elif r == "(":
|
||||
lex.emit(ItemType.LeftParen)
|
||||
lex.paren_depth += 1
|
||||
elif r == ")":
|
||||
lex.emit(ItemType.RightParen)
|
||||
lex.paren_depth -= 1
|
||||
if lex.paren_depth < 0:
|
||||
return errorf(lex, "unexpected right paren " + r)
|
||||
|
||||
elif r == "{":
|
||||
lex.emit(ItemType.LeftBrace)
|
||||
lex.brace_depth += 1
|
||||
elif r == "}":
|
||||
lex.emit(ItemType.RightBrace)
|
||||
lex.brace_depth -= 1
|
||||
if lex.brace_depth < 0:
|
||||
return errorf(lex, "unexpected right brace " + r)
|
||||
|
||||
elif r == "[":
|
||||
lex.emit(ItemType.LeftSBrace)
|
||||
lex.sbrace_depth += 1
|
||||
elif r == "]":
|
||||
lex.emit(ItemType.RightSBrace)
|
||||
lex.sbrace_depth -= 1
|
||||
if lex.sbrace_depth < 0:
|
||||
return errorf(lex, "unexpected right brace " + r)
|
||||
elif is_symbol(r):
|
||||
lex.emit(ItemType.Symbol)
|
||||
else:
|
||||
return errorf(lex, "unrecognized character in action: " + r)
|
||||
|
||||
return lex_filename
|
||||
|
||||
|
||||
def lex_operator(lex: Lexer) -> Callable:
|
||||
lex.accept_run("-|:;")
|
||||
lex.emit(ItemType.Operator)
|
||||
return lex_filename
|
||||
|
||||
|
||||
# LexSpace scans a run of space characters.
|
||||
# One space has already been seen.
|
||||
def lex_space(lex: Lexer) -> Callable:
|
||||
while is_space(lex.peek()):
|
||||
lex.get()
|
||||
|
||||
lex.emit(ItemType.Space)
|
||||
return lex_filename
|
||||
|
||||
|
||||
# Lex_text scans an alphanumeric.
|
||||
def lex_text(lex: Lexer) -> Callable:
|
||||
while True:
|
||||
r = lex.get()
|
||||
if is_alpha_numeric(r):
|
||||
if r.isnumeric(): # E.g. v1
|
||||
word = lex.input[lex.start : lex.pos]
|
||||
if word.lower() in key and key[word.lower()] == ItemType.InfoSpecifier:
|
||||
lex.backup()
|
||||
lex.emit(key[word.lower()])
|
||||
return lex_filename
|
||||
else:
|
||||
if r == "'" and lex.peek() == "s":
|
||||
lex.get()
|
||||
else:
|
||||
lex.backup()
|
||||
word = lex.input[lex.start : lex.pos + 1]
|
||||
if word.lower() == "vol" and lex.peek() == ".":
|
||||
lex.get()
|
||||
word = lex.input[lex.start : lex.pos + 1]
|
||||
|
||||
if word.lower() in key:
|
||||
lex.emit(key[word.lower()])
|
||||
elif cal(word):
|
||||
lex.emit(ItemType.Calendar)
|
||||
else:
|
||||
lex.emit(ItemType.Text)
|
||||
break
|
||||
|
||||
return lex_filename
|
||||
|
||||
|
||||
def cal(value: str) -> Set[Any]:
|
||||
month_abbr = [i for i, x in enumerate(calendar.month_abbr) if x == value.title()]
|
||||
month_name = [i for i, x in enumerate(calendar.month_name) if x == value.title()]
|
||||
day_abbr = [i for i, x in enumerate(calendar.day_abbr) if x == value.title()]
|
||||
day_name = [i for i, x in enumerate(calendar.day_name) if x == value.title()]
|
||||
return set(month_abbr + month_name + day_abbr + day_name)
|
||||
|
||||
|
||||
def lex_number(lex: Lexer) -> Optional[Callable[[Lexer], Optional[Callable]]]:
|
||||
if not lex.scan_number():
|
||||
return errorf(lex, "bad number syntax: " + lex.input[lex.start : lex.pos])
|
||||
# Complex number logic removed. Messes with math operations without space
|
||||
|
||||
if lex.input[lex.start] == "#":
|
||||
lex.emit(ItemType.IssueNumber)
|
||||
elif not lex.input[lex.pos].isdigit():
|
||||
# Assume that 80th is just text and not a number
|
||||
lex.emit(ItemType.Text)
|
||||
else:
|
||||
lex.emit(ItemType.Number)
|
||||
|
||||
return lex_filename
|
||||
|
||||
|
||||
def is_space(character: str) -> bool:
|
||||
return character in "_ \t"
|
||||
|
||||
|
||||
# IsAlphaNumeric reports whether r is an alphabetic, digit, or underscore.
|
||||
def is_alpha_numeric(character: str) -> bool:
|
||||
return character.isalpha() or character.isnumeric()
|
||||
|
||||
|
||||
def is_operator(character: str) -> bool:
|
||||
return character in "-|:;/\\"
|
||||
|
||||
|
||||
def is_symbol(character: str) -> bool:
|
||||
return unicodedata.category(character)[0] in "PS"
|
||||
|
||||
|
||||
def Lex(filename: str) -> Lexer:
|
||||
lex = Lexer(string=os.path.basename(filename))
|
||||
lex.run()
|
||||
return lex
|
||||
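Running the lexer above on a filename is a one-liner via the module-level `Lex()` helper; each token carries its text, start position and `ItemType` (assumes the `comicapi` package from this commit is importable):

```python
from comicapi.filenamelexer import Lex

lexer = Lex("Example Series v2 #011 (of 12) (May 2021).cbz")
for item in lexer.items:
    print(item)
# prints one line per token, e.g. "Example: index: 0: ItemType.Text" and
# "#011: ...: ItemType.IssueNumber", ending with an ItemType.EOF item
```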
comicapi/filenameparser.py (new file, 1120 lines; diff not shown here)
comicapi/genericmetadata.py (new file, 449 lines)
@@ -0,0 +1,449 @@
|
||||
"""A class for internal metadata storage
|
||||
|
||||
The goal of this class is to handle ALL the data that might come from various
|
||||
tagging schemes and databases, such as ComicVine or GCD. This makes conversion
|
||||
possible, however lossy it might be
|
||||
|
||||
"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Any, List, Optional, TypedDict
|
||||
|
||||
from comicapi import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PageType:
|
||||
|
||||
"""
|
||||
These page info classes are exactly the same as the CIX scheme, since
|
||||
it's unique
|
||||
"""
|
||||
|
||||
FrontCover = "FrontCover"
|
||||
InnerCover = "InnerCover"
|
||||
Roundup = "Roundup"
|
||||
Story = "Story"
|
||||
Advertisement = "Advertisement"
|
||||
Editorial = "Editorial"
|
||||
Letters = "Letters"
|
||||
Preview = "Preview"
|
||||
BackCover = "BackCover"
|
||||
Other = "Other"
|
||||
Deleted = "Deleted"
|
||||
|
||||
|
||||
class ImageMetadata(TypedDict, total=False):
|
||||
Type: str
|
||||
Bookmark: str
|
||||
DoublePage: bool
|
||||
Image: int
|
||||
ImageSize: str
|
||||
ImageHeight: str
|
||||
ImageWidth: str
|
||||
|
||||
|
||||
class CreditMetadata(TypedDict):
|
||||
person: str
|
||||
role: str
|
||||
primary: bool
|
||||
|
||||
|
||||
class GenericMetadata:
|
||||
writer_synonyms = ["writer", "plotter", "scripter"]
|
||||
penciller_synonyms = ["artist", "penciller", "penciler", "breakdowns"]
|
||||
inker_synonyms = ["inker", "artist", "finishes"]
|
||||
colorist_synonyms = ["colorist", "colourist", "colorer", "colourer"]
|
||||
letterer_synonyms = ["letterer"]
|
||||
cover_synonyms = ["cover", "covers", "coverartist", "cover artist"]
|
||||
editor_synonyms = ["editor"]
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
||||
self.is_empty: bool = True
|
||||
self.tag_origin: Optional[str] = None
|
||||
|
||||
self.series: Optional[str] = None
|
||||
self.issue: Optional[str] = None
|
||||
self.title: Optional[str] = None
|
||||
self.publisher: Optional[str] = None
|
||||
self.month: Optional[int] = None
|
||||
self.year: Optional[int] = None
|
||||
self.day: Optional[int] = None
|
||||
self.issue_count: Optional[int] = None
|
||||
self.volume: Optional[int] = None
|
||||
self.genre: Optional[str] = None
|
||||
self.language: Optional[str] = None # 2 letter iso code
|
||||
self.comments: Optional[str] = None # use same way as Summary in CIX
|
||||
|
||||
self.volume_count: Optional[int] = None
|
||||
self.critical_rating: Optional[str] = None
|
||||
self.country: Optional[str] = None
|
||||
|
||||
self.alternate_series: Optional[str] = None
|
||||
self.alternate_number: Optional[str] = None
|
||||
self.alternate_count: Optional[int] = None
|
||||
self.imprint: Optional[str] = None
|
||||
self.notes: Optional[str] = None
|
||||
self.web_link: Optional[str] = None
|
||||
self.format: Optional[str] = None
|
||||
self.manga: Optional[str] = None
|
||||
self.black_and_white: Optional[bool] = None
|
||||
self.page_count: Optional[int] = None
|
||||
self.maturity_rating: Optional[str] = None
|
||||
self.community_rating: Optional[str] = None
|
||||
|
||||
self.story_arc: Optional[str] = None
|
||||
self.series_group: Optional[str] = None
|
||||
self.scan_info: Optional[str] = None
|
||||
|
||||
self.characters: Optional[str] = None
|
||||
self.teams: Optional[str] = None
|
||||
self.locations: Optional[str] = None
|
||||
|
||||
self.credits: List[CreditMetadata] = []
|
||||
self.tags: List[str] = []
|
||||
self.pages: List[ImageMetadata] = []
|
||||
|
||||
# Some CoMet-only items
|
||||
self.price: Optional[str] = None
|
||||
self.is_version_of: Optional[str] = None
|
||||
self.rights: Optional[str] = None
|
||||
self.identifier: Optional[str] = None
|
||||
self.last_mark: Optional[str] = None
|
||||
self.cover_image: Optional[str] = None
|
||||
|
||||
def overlay(self, new_md: "GenericMetadata") -> None:
|
||||
"""Overlay a metadata object on this one
|
||||
|
||||
That is, when the new object has non-None values, over-write them
|
||||
to this one.
|
||||
"""
|
||||
|
||||
def assign(cur: str, new: Any) -> None:
|
||||
if new is not None:
|
||||
if isinstance(new, str) and len(new) == 0:
|
||||
setattr(self, cur, None)
|
||||
else:
|
||||
setattr(self, cur, new)
|
||||
|
||||
if not new_md.is_empty:
|
||||
self.is_empty = False
|
||||
|
||||
assign("series", new_md.series)
|
||||
assign("issue", new_md.issue)
|
||||
assign("issue_count", new_md.issue_count)
|
||||
assign("title", new_md.title)
|
||||
assign("publisher", new_md.publisher)
|
||||
assign("day", new_md.day)
|
||||
assign("month", new_md.month)
|
||||
assign("year", new_md.year)
|
||||
assign("volume", new_md.volume)
|
||||
assign("volume_count", new_md.volume_count)
|
||||
assign("genre", new_md.genre)
|
||||
assign("language", new_md.language)
|
||||
assign("country", new_md.country)
|
||||
assign("critical_rating", new_md.critical_rating)
|
||||
assign("alternate_series", new_md.alternate_series)
|
||||
assign("alternate_number", new_md.alternate_number)
|
||||
assign("alternate_count", new_md.alternate_count)
|
||||
assign("imprint", new_md.imprint)
|
||||
assign("web_link", new_md.web_link)
|
||||
assign("format", new_md.format)
|
||||
assign("manga", new_md.manga)
|
||||
assign("black_and_white", new_md.black_and_white)
|
||||
assign("maturity_rating", new_md.maturity_rating)
|
||||
assign("community_rating", new_md.community_rating)
|
||||
assign("story_arc", new_md.story_arc)
|
||||
assign("series_group", new_md.series_group)
|
||||
assign("scan_info", new_md.scan_info)
|
||||
assign("characters", new_md.characters)
|
||||
assign("teams", new_md.teams)
|
||||
assign("locations", new_md.locations)
|
||||
assign("comments", new_md.comments)
|
||||
assign("notes", new_md.notes)
|
||||
|
||||
assign("price", new_md.price)
|
||||
assign("is_version_of", new_md.is_version_of)
|
||||
assign("rights", new_md.rights)
|
||||
assign("identifier", new_md.identifier)
|
||||
assign("last_mark", new_md.last_mark)
|
||||
|
||||
self.overlay_credits(new_md.credits)
|
||||
# TODO
|
||||
|
||||
# not sure if the tags and pages should be broken down, or treated
# as whole lists...
|
||||
|
||||
# For now, go the easy route, where any overlay
|
||||
# value wipes out the whole list
|
||||
if len(new_md.tags) > 0:
|
||||
assign("tags", new_md.tags)
|
||||
|
||||
if len(new_md.pages) > 0:
|
||||
assign("pages", new_md.pages)
|
||||
|
||||
def overlay_credits(self, new_credits: List[CreditMetadata]) -> None:
|
||||
for c in new_credits:
|
||||
primary = bool("primary" in c and c["primary"])
|
||||
|
||||
# Remove credit role if person is blank
|
||||
if c["person"] == "":
|
||||
for r in reversed(self.credits):
|
||||
if r["role"].lower() == c["role"].lower():
|
||||
self.credits.remove(r)
|
||||
# otherwise, add it!
|
||||
else:
|
||||
self.add_credit(c["person"], c["role"], primary)
|
||||
|
||||
def set_default_page_list(self, count: int) -> None:
|
||||
# generate a default page list, with the first page marked as the cover
|
||||
for i in range(count):
|
||||
page_dict = ImageMetadata(Image=i)
|
||||
if i == 0:
|
||||
page_dict["Type"] = PageType.FrontCover
|
||||
self.pages.append(page_dict)
|
||||
|
||||
def get_archive_page_index(self, pagenum: int) -> int:
|
||||
# convert the displayed page number to the page index of the file in
|
||||
# the archive
|
||||
if pagenum < len(self.pages):
|
||||
return int(self.pages[pagenum]["Image"])
|
||||
|
||||
return 0
|
||||
|
||||
def get_cover_page_index_list(self) -> list[int]:
|
||||
# return a list of archive page indices of cover pages
|
||||
coverlist = []
|
||||
for p in self.pages:
|
||||
if "Type" in p and p["Type"] == PageType.FrontCover:
|
||||
coverlist.append(int(p["Image"]))
|
||||
|
||||
if len(coverlist) == 0:
|
||||
coverlist.append(0)
|
||||
|
||||
return coverlist
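    # Usage sketch (added for illustration, not in the original file): with
    #   md.pages = [ImageMetadata(Image=0, Type=PageType.FrontCover), ImageMetadata(Image=1)]
    # get_cover_page_index_list() returns [0] and get_archive_page_index(1) returns 1;
    # if no page is tagged FrontCover, the cover list falls back to [0].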
|
||||
|
||||
def add_credit(self, person: str, role: str, primary: bool = False) -> None:
|
||||
|
||||
credit: CreditMetadata = {"person": person, "role": role, "primary": primary}
|
||||
|
||||
# look to see if it's not already there...
|
||||
found = False
|
||||
for c in self.credits:
|
||||
if c["person"].lower() == person.lower() and c["role"].lower() == role.lower():
|
||||
# no need to add it. just adjust the "primary" flag as needed
|
||||
c["primary"] = primary
|
||||
found = True
|
||||
break
|
||||
|
||||
if not found:
|
||||
self.credits.append(credit)
|
||||
|
||||
def get_primary_credit(self, role: str) -> str:
|
||||
primary = ""
|
||||
for credit in self.credits:
|
||||
if (primary == "" and credit["role"].lower() == role.lower()) or (
|
||||
credit["role"].lower() == role.lower() and credit["primary"]
|
||||
):
|
||||
primary = credit["person"]
|
||||
return primary
|
||||
|
||||
def __str__(self) -> str:
|
||||
vals: list[tuple[str, Any]] = []
|
||||
if self.is_empty:
|
||||
return "No metadata"
|
||||
|
||||
def add_string(tag: str, val: Any) -> None:
|
||||
if val is not None and str(val) != "":
|
||||
vals.append((tag, val))
|
||||
|
||||
def add_attr_string(tag: str) -> None:
|
||||
add_string(tag, getattr(self, tag))
|
||||
|
||||
add_attr_string("series")
|
||||
add_attr_string("issue")
|
||||
add_attr_string("issue_count")
|
||||
add_attr_string("title")
|
||||
add_attr_string("publisher")
|
||||
add_attr_string("year")
|
||||
add_attr_string("month")
|
||||
add_attr_string("day")
|
||||
add_attr_string("volume")
|
||||
add_attr_string("volume_count")
|
||||
add_attr_string("genre")
|
||||
add_attr_string("language")
|
||||
add_attr_string("country")
|
||||
add_attr_string("critical_rating")
|
||||
add_attr_string("alternate_series")
|
||||
add_attr_string("alternate_number")
|
||||
add_attr_string("alternate_count")
|
||||
add_attr_string("imprint")
|
||||
add_attr_string("web_link")
|
||||
add_attr_string("format")
|
||||
add_attr_string("manga")
|
||||
|
||||
add_attr_string("price")
|
||||
add_attr_string("is_version_of")
|
||||
add_attr_string("rights")
|
||||
add_attr_string("identifier")
|
||||
add_attr_string("last_mark")
|
||||
|
||||
if self.black_and_white:
|
||||
add_attr_string("black_and_white")
|
||||
add_attr_string("maturity_rating")
|
||||
add_attr_string("community_rating")
|
||||
add_attr_string("story_arc")
|
||||
add_attr_string("series_group")
|
||||
add_attr_string("scan_info")
|
||||
add_attr_string("characters")
|
||||
add_attr_string("teams")
|
||||
add_attr_string("locations")
|
||||
add_attr_string("comments")
|
||||
add_attr_string("notes")
|
||||
|
||||
add_string("tags", utils.list_to_string(self.tags))
|
||||
|
||||
for c in self.credits:
|
||||
primary = ""
|
||||
if "primary" in c and c["primary"]:
|
||||
primary = " [P]"
|
||||
add_string("credit", c["role"] + ": " + c["person"] + primary)
|
||||
|
||||
# find the longest field name
|
||||
flen = 0
|
||||
for i in vals:
|
||||
flen = max(flen, len(i[0]))
|
||||
flen += 1
|
||||
|
||||
# format the data nicely
|
||||
outstr = ""
|
||||
fmt_str = "{0: <" + str(flen) + "} {1}\n"
|
||||
for i in vals:
|
||||
outstr += fmt_str.format(i[0] + ":", i[1])
|
||||
|
||||
return outstr
|
||||
|
||||
def fix_publisher(self) -> None:
|
||||
if self.publisher is None:
|
||||
return
|
||||
if self.imprint is None:
|
||||
self.imprint = ""
|
||||
|
||||
imprint, publisher = utils.get_publisher(self.publisher)
|
||||
|
||||
self.publisher = publisher
|
||||
|
||||
if self.imprint.lower() in publisher.lower():
|
||||
self.imprint = None
|
||||
|
||||
if self.imprint is None or self.imprint == "":
|
||||
self.imprint = imprint
|
||||
elif self.imprint.lower() in imprint.lower():
|
||||
self.imprint = imprint
|
||||
|
||||
|
||||
md_test = GenericMetadata()
|
||||
|
||||
md_test.is_empty = False
|
||||
md_test.tag_origin = None
|
||||
md_test.series = "Cory Doctorow's Futuristic Tales of the Here and Now"
|
||||
md_test.issue = "1"
|
||||
md_test.title = "Anda's Game"
|
||||
md_test.publisher = "IDW Publishing"
|
||||
md_test.month = 10
|
||||
md_test.year = 2007
|
||||
md_test.day = 1
|
||||
md_test.issue_count = 6
|
||||
md_test.volume = 1
|
||||
md_test.genre = "Sci-Fi"
|
||||
md_test.language = "en"
|
||||
md_test.comments = (
|
||||
"For 12-year-old Anda, getting paid real money to kill the characters of players who were cheating in her favorite online "
|
||||
"computer game was a win-win situation. Until she found out who was paying her, and what those characters meant to the "
|
||||
"livelihood of children around the world."
|
||||
)
|
||||
md_test.volume_count = None
|
||||
md_test.critical_rating = None
|
||||
md_test.country = None
|
||||
md_test.alternate_series = "Tales"
|
||||
md_test.alternate_number = "2"
|
||||
md_test.alternate_count = 7
|
||||
md_test.imprint = "craphound.com"
|
||||
md_test.notes = "Tagged with ComicTagger 1.3.2a5 using info from Comic Vine on 2022-04-16 15:52:26. [Issue ID 140529]"
|
||||
md_test.web_link = "https://comicvine.gamespot.com/cory-doctorows-futuristic-tales-of-the-here-and-no/4000-140529/"
|
||||
md_test.format = "Series"
|
||||
md_test.manga = "No"
|
||||
md_test.black_and_white = None
|
||||
md_test.page_count = 24
|
||||
md_test.maturity_rating = "Everyone 10+"
|
||||
md_test.community_rating = "3.0"
|
||||
md_test.story_arc = "Here and Now"
|
||||
md_test.series_group = "Futuristic Tales"
|
||||
md_test.scan_info = "(CC BY-NC-SA 3.0)"
|
||||
md_test.characters = "Anda"
|
||||
md_test.teams = "Fahrenheit"
|
||||
md_test.locations = "lonely cottage "
|
||||
md_test.credits = [
|
||||
CreditMetadata({"primary": False, "person": "Dara Naraghi", "role": "Writer"}),
|
||||
CreditMetadata({"primary": False, "person": "Esteve Polls", "role": "Penciller"}),
|
||||
CreditMetadata({"primary": False, "person": "Esteve Polls", "role": "Inker"}),
|
||||
CreditMetadata({"primary": False, "person": "Neil Uyetake", "role": "Letterer"}),
|
||||
CreditMetadata({"primary": False, "person": "Sam Kieth", "role": "Cover"}),
|
||||
CreditMetadata({"primary": False, "person": "Ted Adams", "role": "Editor"}),
|
||||
]
|
||||
md_test.tags = []
|
||||
md_test.pages = [
|
||||
{"Image": 0, "ImageHeight": "1280", "ImageSize": "195977", "ImageWidth": "800", "Type": PageType.FrontCover},
|
||||
{"Image": 1, "ImageHeight": "2039", "ImageSize": "611993", "ImageWidth": "1327"},
|
||||
{"Image": 2, "ImageHeight": "2039", "ImageSize": "783726", "ImageWidth": "1327"},
|
||||
{"Image": 3, "ImageHeight": "2039", "ImageSize": "679584", "ImageWidth": "1327"},
|
||||
{"Image": 4, "ImageHeight": "2039", "ImageSize": "788179", "ImageWidth": "1327"},
|
||||
{"Image": 5, "ImageHeight": "2039", "ImageSize": "864433", "ImageWidth": "1327"},
|
||||
{"Image": 6, "ImageHeight": "2039", "ImageSize": "765606", "ImageWidth": "1327"},
|
||||
{"Image": 7, "ImageHeight": "2039", "ImageSize": "876427", "ImageWidth": "1327"},
|
||||
{"Image": 8, "ImageHeight": "2039", "ImageSize": "852622", "ImageWidth": "1327"},
|
||||
{"Image": 9, "ImageHeight": "2039", "ImageSize": "800205", "ImageWidth": "1327"},
|
||||
{"Image": 10, "ImageHeight": "2039", "ImageSize": "746243", "ImageWidth": "1326"},
|
||||
{"Image": 11, "ImageHeight": "2039", "ImageSize": "718062", "ImageWidth": "1327"},
|
||||
{"Image": 12, "ImageHeight": "2039", "ImageSize": "532179", "ImageWidth": "1326"},
|
||||
{"Image": 13, "ImageHeight": "2039", "ImageSize": "686708", "ImageWidth": "1327"},
|
||||
{"Image": 14, "ImageHeight": "2039", "ImageSize": "641907", "ImageWidth": "1327"},
|
||||
{"Image": 15, "ImageHeight": "2039", "ImageSize": "805388", "ImageWidth": "1327"},
|
||||
{"Image": 16, "ImageHeight": "2039", "ImageSize": "668927", "ImageWidth": "1326"},
|
||||
{"Image": 17, "ImageHeight": "2039", "ImageSize": "710605", "ImageWidth": "1327"},
|
||||
{"Image": 18, "ImageHeight": "2039", "ImageSize": "761398", "ImageWidth": "1326"},
|
||||
{"Image": 19, "ImageHeight": "2039", "ImageSize": "743807", "ImageWidth": "1327"},
|
||||
{"Image": 20, "ImageHeight": "2039", "ImageSize": "552911", "ImageWidth": "1326"},
|
||||
{"Image": 21, "ImageHeight": "2039", "ImageSize": "556827", "ImageWidth": "1327"},
|
||||
{"Image": 22, "ImageHeight": "2039", "ImageSize": "675078", "ImageWidth": "1326"},
|
||||
{
|
||||
"Bookmark": "Interview",
|
||||
"Image": 23,
|
||||
"ImageHeight": "2032",
|
||||
"ImageSize": "800965",
|
||||
"ImageWidth": "1338",
|
||||
"Type": PageType.Letters,
|
||||
},
|
||||
]
|
||||
md_test.price = None
|
||||
md_test.is_version_of = None
|
||||
md_test.rights = None
|
||||
md_test.identifier = None
|
||||
md_test.last_mark = None
|
||||
md_test.cover_image = None
|
||||
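The test fixture above exercises most fields; as a quick orientation, here is a minimal, hedged sketch (not part of the diff) of how overlay() and add_credit() behave, assuming the fields referenced below default to None in GenericMetadata.__init__:

base = GenericMetadata()
base.is_empty = False
base.series = "Example Series"  # hypothetical values, for illustration only
base.issue = "1"

new = GenericMetadata()
new.is_empty = False
new.issue = "2"
new.title = ""  # an empty string clears the field on overlay

base.overlay(new)
# base.series == "Example Series"  -> None values in new are ignored
# base.issue == "2"                -> non-None values overwrite
# base.title is None               -> empty strings reset the field to None

base.add_credit("Jane Doe", "Writer", primary=True)  # hypothetical credit
base.add_credit("jane doe", "writer")  # case-insensitive duplicate: only the primary flag is updated
# len(base.credits) == 1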
comicapi/issuestring.py (new file, 122 lines)
@@ -0,0 +1,122 @@
"""Support for mixed digit/string type Issue field
|
||||
|
||||
Class for handling the odd permutations of an 'issue number' that the
|
||||
comics industry throws at us.
|
||||
e.g.: "12", "12.1", "0", "-1", "5AU", "100-2"
|
||||
"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
import unicodedata
|
||||
from typing import Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IssueString:
|
||||
def __init__(self, text: Optional[str]) -> None:
|
||||
|
||||
# break up the issue number string into 2 parts: the numeric and suffix string.
|
||||
# (assumes that the numeric portion is always first)
|
||||
|
||||
self.num = None
|
||||
self.suffix = ""
|
||||
|
||||
if text is None:
|
||||
return
|
||||
|
||||
text = str(text)
|
||||
|
||||
if len(text) == 0:
|
||||
return
|
||||
|
||||
# skip the minus sign if it's first
|
||||
if text[0] == "-":
|
||||
start = 1
|
||||
else:
|
||||
start = 0
|
||||
|
||||
        # if it's still not numeric at the start, skip it
|
||||
if text[start].isdigit() or text[start] == ".":
|
||||
# walk through the string, look for split point (the first
|
||||
# non-numeric)
|
||||
decimal_count = 0
|
||||
for idx in range(start, len(text)):
|
||||
if text[idx] not in "0123456789.":
|
||||
break
|
||||
# special case: also split on second "."
|
||||
if text[idx] == ".":
|
||||
decimal_count += 1
|
||||
if decimal_count > 1:
|
||||
break
|
||||
else:
|
||||
idx = len(text)
|
||||
|
||||
# move trailing numeric decimal to suffix
|
||||
        # (only if there is other junk after)
|
||||
if text[idx - 1] == "." and len(text) != idx:
|
||||
idx = idx - 1
|
||||
|
||||
# if there is no numeric after the minus, make the minus part of
|
||||
# the suffix
|
||||
if idx == 1 and start == 1:
|
||||
idx = 0
|
||||
|
||||
part1 = text[0:idx]
|
||||
part2 = text[idx : len(text)]
|
||||
|
||||
if part1 != "":
|
||||
self.num = float(part1)
|
||||
self.suffix = part2
|
||||
else:
|
||||
self.suffix = text
|
||||
|
||||
def as_string(self, pad: int = 0) -> str:
|
||||
# return the float, left side zero-padded, with suffix attached
|
||||
if self.num is None:
|
||||
return self.suffix
|
||||
|
||||
negative = self.num < 0
|
||||
|
||||
num_f = abs(self.num)
|
||||
|
||||
num_int = int(num_f)
|
||||
num_s = str(num_int)
|
||||
if float(num_int) != num_f:
|
||||
num_s = str(num_f)
|
||||
|
||||
num_s += self.suffix
|
||||
|
||||
# create padding
|
||||
padding = ""
|
||||
length = len(str(num_int))
|
||||
if length < pad:
|
||||
padding = "0" * (pad - length)
|
||||
|
||||
num_s = padding + num_s
|
||||
if negative:
|
||||
num_s = "-" + num_s
|
||||
|
||||
return num_s
|
||||
|
||||
    def as_float(self) -> Optional[float]:
        # return the float, with no suffix
        if len(self.suffix) == 1 and self.suffix.isnumeric():
            return (self.num or 0) + unicodedata.numeric(self.suffix)

        return self.num
|
||||
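A short, hedged sketch of the parsing behaviour implemented above (expected values are derived from reading the code, not from a test suite):

IssueString("3").as_string(pad=3)  # "003"      zero-padded integer part
IssueString("12.5AU").as_string()  # "12.5AU"   numeric part 12.5, suffix "AU"
IssueString("-1").as_float()       # -1.0
IssueString("1½").as_float()       # 1.5        a single numeric suffix is added to the number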
comicapi/utils.py (new file, 263 lines)
@@ -0,0 +1,263 @@
"""Some generic utilities"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import unicodedata
|
||||
from collections import defaultdict
|
||||
from typing import Any, Iterable, List, Optional, Union
|
||||
|
||||
import pycountry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UtilsVars:
|
||||
already_fixed_encoding = False
|
||||
|
||||
|
||||
def get_recursive_filelist(pathlist: List[str]) -> List[str]:
|
||||
"""Get a recursive list of of all files under all path items in the list"""
|
||||
|
||||
filelist = []
|
||||
for p in pathlist:
|
||||
# if path is a folder, walk it recursively, and all files underneath
|
||||
if not isinstance(p, str):
|
||||
# it's probably a QString
|
||||
p = str(p)
|
||||
|
||||
if os.path.isdir(p):
|
||||
for root, _, files in os.walk(p):
|
||||
for f in files:
|
||||
if not isinstance(f, str):
|
||||
# it's probably a QString
|
||||
f = str(f)
|
||||
filelist.append(os.path.join(root, f))
|
||||
else:
|
||||
filelist.append(p)
|
||||
|
||||
return filelist
|
||||
|
||||
|
||||
def list_to_string(lst: List[Union[str, Any]]) -> str:
|
||||
string = ""
|
||||
if lst is not None:
|
||||
for item in lst:
|
||||
if len(string) > 0:
|
||||
string += ", "
|
||||
string += item
|
||||
return string
|
||||
|
||||
|
||||
def add_to_path(dirname: str) -> None:
|
||||
if dirname is not None and dirname != "":
|
||||
|
||||
# verify that path doesn't already contain the given dirname
|
||||
tmpdirname = re.escape(dirname)
|
||||
pattern = r"(^|{sep}){dir}({sep}|$)".format(dir=tmpdirname, sep=os.pathsep)
|
||||
|
||||
match = re.search(pattern, os.environ["PATH"])
|
||||
if not match:
|
||||
os.environ["PATH"] = dirname + os.pathsep + os.environ["PATH"]
|
||||
|
||||
|
||||
def which(program: str) -> Optional[str]:
|
||||
"""Returns path of the executable, if it exists"""
|
||||
|
||||
def is_exe(fpath: str) -> bool:
|
||||
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
|
||||
|
||||
fpath, _ = os.path.split(program)
|
||||
if fpath:
|
||||
if is_exe(program):
|
||||
return program
|
||||
else:
|
||||
for path in os.environ["PATH"].split(os.pathsep):
|
||||
exe_file = os.path.join(path, program)
|
||||
if is_exe(exe_file):
|
||||
return exe_file
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def xlate(data: Any, is_int: bool = False) -> Any:
|
||||
if data is None or data == "":
|
||||
return None
|
||||
if is_int:
|
||||
i = str(data).translate(defaultdict(lambda: None, zip((ord(c) for c in "1234567890"), "1234567890")))
|
||||
if i == "0":
|
||||
return "0"
|
||||
if i == "":
|
||||
return None
|
||||
return int(i)
|
||||
|
||||
return str(data)
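# Illustrative behaviour (not part of the original module), derived from the code above:
#   xlate("12 pages", is_int=True) -> 12     non-digit characters are stripped first
#   xlate("0", is_int=True)        -> "0"    zero is returned as a string by the explicit check
#   xlate("", is_int=True)         -> None
#   xlate(3.0)                     -> "3.0"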
|
||||
|
||||
|
||||
def remove_articles(text: str) -> str:
|
||||
text = text.lower()
|
||||
articles = [
|
||||
"&",
|
||||
"a",
|
||||
"am",
|
||||
"an",
|
||||
"and",
|
||||
"as",
|
||||
"at",
|
||||
"be",
|
||||
"but",
|
||||
"by",
|
||||
"for",
|
||||
"if",
|
||||
"is",
|
||||
"issue",
|
||||
"it",
|
||||
"it's",
|
||||
"its",
|
||||
"itself",
|
||||
"of",
|
||||
"or",
|
||||
"so",
|
||||
"the",
|
||||
"the",
|
||||
"with",
|
||||
]
|
||||
new_text = ""
|
||||
for word in text.split(" "):
|
||||
if word not in articles:
|
||||
new_text += word + " "
|
||||
|
||||
new_text = new_text[:-1]
|
||||
|
||||
return new_text
|
||||
|
||||
|
||||
def sanitize_title(text: str) -> str:
|
||||
    # Normalize unicode and convert to ASCII. Does not work for everything, e.g. ½ becomes 1⁄2, not 1/2.
    # This will probably cause issues with titles in other character sets, e.g. Chinese, Japanese.
|
||||
text = unicodedata.normalize("NFKD", text).encode("ascii", "ignore").decode("ascii")
|
||||
# comicvine keeps apostrophes a part of the word
|
||||
text = text.replace("'", "")
|
||||
text = text.replace('"', "")
|
||||
# comicvine ignores punctuation and accents
|
||||
text = re.sub(r"[^A-Za-z0-9]+", " ", text)
|
||||
# remove extra space and articles and all lower case
|
||||
text = remove_articles(text).lower().strip()
|
||||
|
||||
return text
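# Example (illustrative, not from the original file):
#   sanitize_title("The Amazing Spider-Man!") -> "amazing spider man"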
|
||||
|
||||
|
||||
def unique_file(file_name: str) -> str:
|
||||
counter = 1
|
||||
file_name_parts = os.path.splitext(file_name)
|
||||
while True:
|
||||
if not os.path.lexists(file_name):
|
||||
return file_name
|
||||
file_name = file_name_parts[0] + " (" + str(counter) + ")" + file_name_parts[1]
|
||||
counter += 1
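# Illustrative behaviour with assumed filenames: unique_file("cover.jpg") returns "cover.jpg" if that
# path is free, otherwise "cover (1).jpg", "cover (2).jpg", ... until an unused name is found.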
|
||||
|
||||
|
||||
languages: dict[Optional[str], Optional[str]] = defaultdict(lambda: None)
|
||||
|
||||
countries: dict[Optional[str], Optional[str]] = defaultdict(lambda: None)
|
||||
|
||||
for c in pycountry.countries:
|
||||
if "alpha_2" in c._fields:
|
||||
countries[c.alpha_2] = c.name
|
||||
|
||||
for lng in pycountry.languages:
|
||||
if "alpha_2" in lng._fields:
|
||||
languages[lng.alpha_2] = lng.name
|
||||
|
||||
|
||||
def get_language_from_iso(iso: Optional[str]) -> Optional[str]:
|
||||
return languages[iso]
|
||||
|
||||
|
||||
def get_language(string: Optional[str]) -> Optional[str]:
|
||||
if string is None:
|
||||
return None
|
||||
|
||||
lang = get_language_from_iso(string)
|
||||
|
||||
if lang is None:
|
||||
try:
|
||||
return str(pycountry.languages.lookup(string).name)
|
||||
except:
|
||||
return None
|
||||
return lang
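# Illustrative, assuming pycountry's standard data: get_language("en") -> "English",
# get_language("eng") -> "English" via the pycountry lookup fallback, and an unknown string returns None.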
|
||||
|
||||
|
||||
def get_publisher(publisher: str) -> tuple[str, str]:
|
||||
if publisher is None:
|
||||
return ("", "")
|
||||
imprint = ""
|
||||
|
||||
for pub in publishers.values():
|
||||
imprint, publisher, ok = pub[publisher]
|
||||
if ok:
|
||||
break
|
||||
|
||||
return (imprint, publisher)
|
||||
|
||||
|
||||
def update_publishers(new_publishers: dict[str, dict[str, str]]) -> None:
|
||||
for publisher in new_publishers:
|
||||
if publisher in publishers:
|
||||
publishers[publisher].update(new_publishers[publisher])
|
||||
else:
|
||||
publishers[publisher] = ImprintDict(publisher, new_publishers[publisher])
|
||||
|
||||
|
||||
class ImprintDict(dict):
|
||||
"""
|
||||
ImprintDict takes a publisher and a dict or mapping of lowercased
|
||||
imprint names to the proper imprint name. Retreiving a value from an
|
||||
ImprintDict returns a tuple of (imprint, publisher, keyExists).
|
||||
if the key does not exist the key is returned as the publisher unchanged
|
||||
"""
|
||||
|
||||
def __init__(self, publisher: str, mapping: Iterable = (), **kwargs: Any):
|
||||
super().__init__(mapping, **kwargs)
|
||||
self.publisher = publisher
|
||||
|
||||
def __missing__(self, key: str) -> None:
|
||||
return None
|
||||
|
||||
def __getitem__(self, k: str) -> tuple[str, str, bool]:
|
||||
item = super().__getitem__(k.casefold())
|
||||
if k.casefold() == self.publisher.casefold():
|
||||
return ("", self.publisher, True)
|
||||
if item is None:
|
||||
return ("", k, False)
|
||||
else:
|
||||
return (item, self.publisher, True)
|
||||
|
||||
def copy(self) -> "ImprintDict":
|
||||
return ImprintDict(self.publisher, super().copy())
|
||||
|
||||
|
||||
publishers: dict[str, ImprintDict] = {}
|
||||
|
||||
|
||||
def load_publishers() -> None:
|
||||
try:
|
||||
update_publishers(json.loads((pathlib.Path(__file__).parent / "data" / "publishers.json").read_text("utf-8")))
|
||||
except Exception:
|
||||
        logger.exception("Failed to load publishers.json; there are no publishers or imprints loaded")
|
||||
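A hedged sketch of the ImprintDict lookup contract described in its docstring; the publisher and imprint names below are illustrative and not taken from publishers.json:

d = ImprintDict("DC Comics", {"vertigo": "Vertigo"})

d["Vertigo"]    # ("Vertigo", "DC Comics", True)  known imprint resolves to its publisher
d["DC Comics"]  # ("", "DC Comics", True)         the publisher itself never maps to an imprint
d["Image"]      # ("", "Image", False)            unknown keys pass through as the publisher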
comicarchive.py (deleted, 941 lines)
@@ -1,941 +0,0 @@
"""
|
||||
A python class to represent a single comic, be it file or folder of images
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import zipfile
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import tempfile
|
||||
import subprocess
|
||||
import platform
|
||||
if platform.system() == "Windows":
|
||||
import _subprocess
|
||||
import time
|
||||
|
||||
import StringIO
|
||||
try:
|
||||
import Image
|
||||
pil_available = True
|
||||
except ImportError:
|
||||
pil_available = False
|
||||
|
||||
sys.path.insert(0, os.path.abspath(".") )
|
||||
import UnRAR2
|
||||
from UnRAR2.rar_exceptions import *
|
||||
|
||||
from options import Options, MetaDataStyle
|
||||
from comicinfoxml import ComicInfoXml
|
||||
from comicbookinfo import ComicBookInfo
|
||||
from comet import CoMet
|
||||
from genericmetadata import GenericMetadata, PageType
|
||||
from filenameparser import FileNameParser
|
||||
|
||||
|
||||
class ZipArchiver:
|
||||
|
||||
def __init__( self, path ):
|
||||
self.path = path
|
||||
|
||||
def getArchiveComment( self ):
|
||||
zf = zipfile.ZipFile( self.path, 'r' )
|
||||
comment = zf.comment
|
||||
zf.close()
|
||||
return comment
|
||||
|
||||
def setArchiveComment( self, comment ):
|
||||
return self.writeZipComment( self.path, comment )
|
||||
|
||||
def readArchiveFile( self, archive_file ):
|
||||
data = ""
|
||||
zf = zipfile.ZipFile( self.path, 'r' )
|
||||
try:
|
||||
data = zf.read( archive_file )
|
||||
except zipfile.BadZipfile:
|
||||
print "bad zipfile: {0} :: {1}".format(self.path, archive_file)
|
||||
except Exception:
|
||||
print "bad zipfile: {0} :: {1}".format(self.path, archive_file)
|
||||
finally:
|
||||
zf.close()
|
||||
return data
|
||||
|
||||
def removeArchiveFile( self, archive_file ):
|
||||
try:
|
||||
self.rebuildZipFile( [ archive_file ] )
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def writeArchiveFile( self, archive_file, data ):
|
||||
# At the moment, no other option but to rebuild the whole
|
||||
# zip archive w/o the indicated file. Very sucky, but maybe
|
||||
# another solution can be found
|
||||
try:
|
||||
self.rebuildZipFile( [ archive_file ] )
|
||||
|
||||
#now just add the archive file as a new one
|
||||
zf = zipfile.ZipFile(self.path, mode='a', compression=zipfile.ZIP_DEFLATED )
|
||||
zf.writestr( archive_file, data )
|
||||
zf.close()
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
def getArchiveFilenameList( self ):
|
||||
zf = zipfile.ZipFile( self.path, 'r' )
|
||||
namelist = zf.namelist()
|
||||
zf.close()
|
||||
return namelist
|
||||
|
||||
# zip helper func
|
||||
def rebuildZipFile( self, exclude_list ):
|
||||
|
||||
# this recompresses the zip archive, without the files in the exclude_list
|
||||
#print "Rebuilding zip {0} without {1}".format( self.path, exclude_list )
|
||||
|
||||
# generate temp file
|
||||
tmp_fd, tmp_name = tempfile.mkstemp( dir=os.path.dirname(self.path) )
|
||||
os.close( tmp_fd )
|
||||
|
||||
zin = zipfile.ZipFile (self.path, 'r')
|
||||
zout = zipfile.ZipFile (tmp_name, 'w')
|
||||
for item in zin.infolist():
|
||||
buffer = zin.read(item.filename)
|
||||
if ( item.filename not in exclude_list ):
|
||||
zout.writestr(item, buffer)
|
||||
|
||||
#preserve the old comment
|
||||
zout.comment = zin.comment
|
||||
|
||||
zout.close()
|
||||
zin.close()
|
||||
|
||||
# replace with the new file
|
||||
os.remove( self.path )
|
||||
os.rename( tmp_name, self.path )
|
||||
|
||||
|
||||
def writeZipComment( self, filename, comment ):
|
||||
"""
|
||||
This is a custom function for writing a comment to a zip file,
|
||||
since the built-in one doesn't seem to work on Windows and Mac OS/X
|
||||
|
||||
Fortunately, the zip comment is at the end of the file, and it's
|
||||
easy to manipulate. See this website for more info:
|
||||
see: http://en.wikipedia.org/wiki/Zip_(file_format)#Structure
|
||||
"""
|
||||
|
||||
#get file size
|
||||
statinfo = os.stat(filename)
|
||||
file_length = statinfo.st_size
|
||||
|
||||
try:
|
||||
fo = open(filename, "r+b")
|
||||
|
||||
#the starting position, relative to EOF
|
||||
pos = -4
|
||||
|
||||
found = False
|
||||
value = bytearray()
|
||||
|
||||
# walk backwards to find the "End of Central Directory" record
|
||||
while ( not found ) and ( -pos != file_length ):
|
||||
# seek, relative to EOF
|
||||
fo.seek( pos, 2)
|
||||
|
||||
value = fo.read( 4 )
|
||||
|
||||
#look for the end of central directory signature
|
||||
if bytearray(value) == bytearray([ 0x50, 0x4b, 0x05, 0x06 ]):
|
||||
found = True
|
||||
else:
|
||||
# not found, step back another byte
|
||||
pos = pos - 1
|
||||
#print pos,"{1} int: {0:x}".format(bytearray(value)[0], value)
|
||||
|
||||
if found:
|
||||
|
||||
# now skip forward 20 bytes to the comment length word
|
||||
pos += 20
|
||||
fo.seek( pos, 2)
|
||||
|
||||
# Pack the length of the comment string
|
||||
format = "H" # one 2-byte integer
|
||||
comment_length = struct.pack(format, len(comment)) # pack integer in a binary string
|
||||
|
||||
# write out the length
|
||||
fo.write( comment_length )
|
||||
fo.seek( pos+2, 2)
|
||||
|
||||
# write out the comment itself
|
||||
fo.write( comment )
|
||||
fo.truncate()
|
||||
fo.close()
|
||||
else:
|
||||
raise Exception('Failed to write comment to zip file!')
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def copyFromArchive( self, otherArchive ):
|
||||
# Replace the current zip with one copied from another archive
|
||||
try:
|
||||
zout = zipfile.ZipFile (self.path, 'w')
|
||||
for fname in otherArchive.getArchiveFilenameList():
|
||||
data = otherArchive.readArchiveFile( fname )
|
||||
if data is not None:
|
||||
zout.writestr( fname, data )
|
||||
zout.close()
|
||||
|
||||
#preserve the old comment
|
||||
comment = otherArchive.getArchiveComment()
|
||||
if comment is not None:
|
||||
if not self.writeZipComment( self.path, comment ):
|
||||
return False
|
||||
except Exception as e:
|
||||
print "Error while copying to {0}: {1}".format(self.path, e)
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
#------------------------------------------
|
||||
# RAR implementation
|
||||
|
||||
class RarArchiver:
|
||||
|
||||
devnull = None
|
||||
def __init__( self, path ):
|
||||
self.path = path
|
||||
self.rar_exe_path = None
|
||||
|
||||
if RarArchiver.devnull is None:
|
||||
RarArchiver.devnull = open(os.devnull, "w")
|
||||
|
||||
# windows only, keeps the cmd.exe from popping up
|
||||
if platform.system() == "Windows":
|
||||
self.startupinfo = subprocess.STARTUPINFO()
|
||||
self.startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
|
||||
else:
|
||||
self.startupinfo = None
|
||||
|
||||
def __del__(self):
|
||||
#RarArchiver.devnull.close()
|
||||
pass
|
||||
|
||||
def getArchiveComment( self ):
|
||||
|
||||
rarc = self.getRARObj()
|
||||
return rarc.comment
|
||||
|
||||
def setArchiveComment( self, comment ):
|
||||
|
||||
if self.rar_exe_path is not None:
|
||||
try:
|
||||
# write comment to temp file
|
||||
tmp_fd, tmp_name = tempfile.mkstemp()
|
||||
f = os.fdopen(tmp_fd, 'w+b')
|
||||
f.write( comment )
|
||||
f.close()
|
||||
|
||||
working_dir = os.path.dirname( os.path.abspath( self.path ) )
|
||||
|
||||
# use external program to write comment to Rar archive
|
||||
subprocess.call([self.rar_exe_path, 'c', '-w' + working_dir , '-c-', '-z' + tmp_name, self.path],
|
||||
startupinfo=self.startupinfo,
|
||||
stdout=RarArchiver.devnull)
|
||||
|
||||
if platform.system() == "Darwin":
|
||||
time.sleep(1)
|
||||
|
||||
os.remove( tmp_name)
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def readArchiveFile( self, archive_file ):
|
||||
|
||||
# Make sure to escape brackets, since some funky stuff is going on
|
||||
# underneath with "fnmatch"
|
||||
archive_file = archive_file.replace("[", '[[]')
|
||||
entries = []
|
||||
|
||||
rarc = self.getRARObj()
|
||||
|
||||
tries = 0
|
||||
while tries < 10:
|
||||
try:
|
||||
tries = tries+1
|
||||
entries = rarc.read_files( archive_file )
|
||||
|
||||
except (OSError, IOError) as e:
|
||||
print e, "in readArchiveFile! try %s" % tries
|
||||
time.sleep(1)
|
||||
except Exception as e:
|
||||
print "Unexpected exception in readArchiveFile! {0}".format( e )
|
||||
break
|
||||
|
||||
else:
|
||||
#Success"
|
||||
#entries is a list of of tuples: ( rarinfo, filedata)
|
||||
if (len(entries) == 1):
|
||||
return entries[0][1]
|
||||
else:
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
|
||||
|
||||
def writeArchiveFile( self, archive_file, data ):
|
||||
|
||||
if self.rar_exe_path is not None:
|
||||
try:
|
||||
tmp_folder = tempfile.mkdtemp()
|
||||
|
||||
tmp_file = os.path.join( tmp_folder, archive_file )
|
||||
|
||||
working_dir = os.path.dirname( os.path.abspath( self.path ) )
|
||||
|
||||
# TODO: will this break if 'archive_file' is in a subfolder. i.e. "foo/bar.txt"
|
||||
# will need to create the subfolder above, I guess...
|
||||
f = open(tmp_file, 'w')
|
||||
f.write( data )
|
||||
f.close()
|
||||
|
||||
# use external program to write file to Rar archive
|
||||
subprocess.call([self.rar_exe_path, 'a', '-w' + working_dir ,'-c-', '-ep', self.path, tmp_file],
|
||||
startupinfo=self.startupinfo,
|
||||
stdout=RarArchiver.devnull)
|
||||
|
||||
if platform.system() == "Darwin":
|
||||
time.sleep(1)
|
||||
os.remove( tmp_file)
|
||||
os.rmdir( tmp_folder)
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def removeArchiveFile( self, archive_file ):
|
||||
if self.rar_exe_path is not None:
|
||||
try:
|
||||
# use external program to remove file from Rar archive
|
||||
subprocess.call([self.rar_exe_path, 'd','-c-', self.path, archive_file],
|
||||
startupinfo=self.startupinfo,
|
||||
stdout=RarArchiver.devnull)
|
||||
|
||||
if platform.system() == "Darwin":
|
||||
time.sleep(1)
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def getArchiveFilenameList( self ):
|
||||
|
||||
rarc = self.getRARObj()
|
||||
#namelist = [ item.filename for item in rarc.infolist() ]
|
||||
#return namelist
|
||||
|
||||
tries = 0
|
||||
while tries < 10:
|
||||
try:
|
||||
tries = tries+1
|
||||
namelist = [ item.filename for item in rarc.infolist() ]
|
||||
|
||||
except (OSError, IOError) as e:
|
||||
print e, "in getArchiveFilenameList! try %s" % tries
|
||||
time.sleep(1)
|
||||
|
||||
else:
|
||||
#Success"
|
||||
return namelist
|
||||
|
||||
raise e
|
||||
|
||||
|
||||
def getRARObj( self ):
|
||||
tries = 0
|
||||
while tries < 10:
|
||||
try:
|
||||
tries = tries+1
|
||||
rarc = UnRAR2.RarFile( self.path )
|
||||
|
||||
except (OSError, IOError) as e:
|
||||
print e, "in getRARObj! try %s" % tries
|
||||
time.sleep(1)
|
||||
|
||||
else:
|
||||
#Success"
|
||||
return rarc
|
||||
|
||||
raise e
|
||||
|
||||
#------------------------------------------
|
||||
# Folder implementation
|
||||
class FolderArchiver:
|
||||
|
||||
def __init__( self, path ):
|
||||
self.path = path
|
||||
self.comment_file_name = "ComicTaggerFolderComment.txt"
|
||||
|
||||
def getArchiveComment( self ):
|
||||
return self.readArchiveFile( self.comment_file_name )
|
||||
|
||||
def setArchiveComment( self, comment ):
|
||||
return self.writeArchiveFile( self.comment_file_name, comment )
|
||||
|
||||
def readArchiveFile( self, archive_file ):
|
||||
|
||||
data = ""
|
||||
fname = os.path.join( self.path, archive_file )
|
||||
try:
|
||||
with open( fname, 'rb' ) as f:
|
||||
data = f.read()
|
||||
f.close()
|
||||
except IOError as e:
|
||||
pass
|
||||
|
||||
return data
|
||||
|
||||
def writeArchiveFile( self, archive_file, data ):
|
||||
|
||||
fname = os.path.join( self.path, archive_file )
|
||||
try:
|
||||
with open(fname, 'w+') as f:
|
||||
f.write( data )
|
||||
f.close()
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def removeArchiveFile( self, archive_file ):
|
||||
|
||||
fname = os.path.join( self.path, archive_file )
|
||||
try:
|
||||
os.remove( fname )
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def getArchiveFilenameList( self ):
|
||||
return self.listFiles( self.path )
|
||||
|
||||
def listFiles( self, folder ):
|
||||
|
||||
itemlist = list()
|
||||
|
||||
for item in os.listdir( folder ):
|
||||
itemlist.append( item )
|
||||
if os.path.isdir( item ):
|
||||
itemlist.extend( self.listFiles( os.path.join( folder, item ) ))
|
||||
|
||||
return itemlist
|
||||
|
||||
#------------------------------------------
|
||||
# Unknown implementation
|
||||
class UnknownArchiver:
|
||||
|
||||
def __init__( self, path ):
|
||||
self.path = path
|
||||
|
||||
def getArchiveComment( self ):
|
||||
return ""
|
||||
def setArchiveComment( self, comment ):
|
||||
return False
|
||||
def readArchiveFilen( self ):
|
||||
return ""
|
||||
def writeArchiveFile( self, archive_file, data ):
|
||||
return False
|
||||
def removeArchiveFile( self, archive_file ):
|
||||
return False
|
||||
def getArchiveFilenameList( self ):
|
||||
return []
|
||||
|
||||
#------------------------------------------------------------------
|
||||
class ComicArchive:
|
||||
|
||||
class ArchiveType:
|
||||
Zip, Rar, Folder, Unknown = range(4)
|
||||
|
||||
def __init__( self, path ):
|
||||
self.path = path
|
||||
self.ci_xml_filename = 'ComicInfo.xml'
|
||||
self.comet_default_filename = 'CoMet.xml'
|
||||
self.resetCache()
|
||||
|
||||
if self.zipTest():
|
||||
self.archive_type = self.ArchiveType.Zip
|
||||
self.archiver = ZipArchiver( self.path )
|
||||
|
||||
elif self.rarTest():
|
||||
self.archive_type = self.ArchiveType.Rar
|
||||
self.archiver = RarArchiver( self.path )
|
||||
|
||||
elif os.path.isdir( self.path ):
|
||||
self.archive_type = self.ArchiveType.Folder
|
||||
self.archiver = FolderArchiver( self.path )
|
||||
else:
|
||||
self.archive_type = self.ArchiveType.Unknown
|
||||
self.archiver = UnknownArchiver( self.path )
|
||||
|
||||
# Clears the cached data
|
||||
def resetCache( self ):
|
||||
self.has_cix = None
|
||||
self.has_cbi = None
|
||||
self.has_comet = None
|
||||
self.comet_filename = None
|
||||
self.page_count = None
|
||||
self.page_list = None
|
||||
self.cix_md = None
|
||||
self.cbi_md = None
|
||||
self.comet_md = None
|
||||
|
||||
def rename( self, path ):
|
||||
self.path = path
|
||||
self.archiver.path = path
|
||||
|
||||
def setExternalRarProgram( self, rar_exe_path ):
|
||||
if self.isRar():
|
||||
self.archiver.rar_exe_path = rar_exe_path
|
||||
|
||||
def zipTest( self ):
|
||||
return zipfile.is_zipfile( self.path )
|
||||
|
||||
def rarTest( self ):
|
||||
try:
|
||||
rarc = UnRAR2.RarFile( self.path )
|
||||
except: # InvalidRARArchive:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def isZip( self ):
|
||||
return self.archive_type == self.ArchiveType.Zip
|
||||
|
||||
def isRar( self ):
|
||||
return self.archive_type == self.ArchiveType.Rar
|
||||
|
||||
def isFolder( self ):
|
||||
return self.archive_type == self.ArchiveType.Folder
|
||||
|
||||
def isWritable( self, check_rar_status=True ):
|
||||
if self.archive_type == self.ArchiveType.Unknown :
|
||||
return False
|
||||
|
||||
elif check_rar_status and self.isRar() and self.archiver.rar_exe_path is None:
|
||||
return False
|
||||
|
||||
elif not os.access(self.path, os.W_OK):
|
||||
return False
|
||||
|
||||
elif ((self.archive_type != self.ArchiveType.Folder) and
|
||||
(not os.access( os.path.dirname( os.path.abspath(self.path)), os.W_OK ))):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def isWritableForStyle( self, data_style ):
|
||||
|
||||
if self.isRar() and data_style == MetaDataStyle.CBI:
|
||||
return False
|
||||
|
||||
return self.isWritable()
|
||||
|
||||
def seemsToBeAComicArchive( self ):
|
||||
|
||||
# Do we even care about extensions??
|
||||
ext = os.path.splitext(self.path)[1].lower()
|
||||
|
||||
if (
|
||||
( self.isZip() or self.isRar() or self.isFolder() )
|
||||
and
|
||||
( self.getNumberOfPages() > 2)
|
||||
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def readMetadata( self, style ):
|
||||
|
||||
if style == MetaDataStyle.CIX:
|
||||
return self.readCIX()
|
||||
elif style == MetaDataStyle.CBI:
|
||||
return self.readCBI()
|
||||
elif style == MetaDataStyle.COMET:
|
||||
return self.readCoMet()
|
||||
else:
|
||||
return GenericMetadata()
|
||||
|
||||
def writeMetadata( self, metadata, style ):
|
||||
|
||||
retcode = None
|
||||
if style == MetaDataStyle.CIX:
|
||||
retcode = self.writeCIX( metadata )
|
||||
elif style == MetaDataStyle.CBI:
|
||||
retcode = self.writeCBI( metadata )
|
||||
elif style == MetaDataStyle.COMET:
|
||||
retcode = self.writeCoMet( metadata )
|
||||
return retcode
|
||||
|
||||
|
||||
def hasMetadata( self, style ):
|
||||
|
||||
if style == MetaDataStyle.CIX:
|
||||
return self.hasCIX()
|
||||
elif style == MetaDataStyle.CBI:
|
||||
return self.hasCBI()
|
||||
elif style == MetaDataStyle.COMET:
|
||||
return self.hasCoMet()
|
||||
else:
|
||||
return False
|
||||
|
||||
def removeMetadata( self, style ):
|
||||
retcode = True
|
||||
if style == MetaDataStyle.CIX:
|
||||
retcode = self.removeCIX()
|
||||
elif style == MetaDataStyle.CBI:
|
||||
retcode = self.removeCBI()
|
||||
elif style == MetaDataStyle.COMET:
|
||||
retcode = self.removeCoMet()
|
||||
return retcode
|
||||
|
||||
def getPage( self, index ):
|
||||
|
||||
image_data = None
|
||||
|
||||
filename = self.getPageName( index )
|
||||
|
||||
if filename is not None:
|
||||
image_data = self.archiver.readArchiveFile( filename )
|
||||
|
||||
return image_data
|
||||
|
||||
def getPageName( self, index ):
|
||||
|
||||
page_list = self.getPageNameList()
|
||||
|
||||
num_pages = len( page_list )
|
||||
if num_pages == 0 or index >= num_pages:
|
||||
return None
|
||||
|
||||
return page_list[index]
|
||||
|
||||
def getPageNameList( self , sort_list=True):
|
||||
|
||||
if self.page_list is None:
|
||||
# get the list file names in the archive, and sort
|
||||
files = self.archiver.getArchiveFilenameList()
|
||||
|
||||
# seems like some archive creators are on Windows, and don't know about case-sensitivity!
|
||||
if sort_list:
|
||||
files.sort(key=lambda x: x.lower())
|
||||
|
||||
# make a sub-list of image files
|
||||
self.page_list = []
|
||||
for name in files:
|
||||
if ( name[-4:].lower() in [ ".jpg", "jpeg", ".png" ] and os.path.basename(name)[0] != "." ):
|
||||
self.page_list.append(name)
|
||||
|
||||
return self.page_list
|
||||
|
||||
def getNumberOfPages( self ):
|
||||
|
||||
if self.page_count is None:
|
||||
self.page_count = len( self.getPageNameList( ) )
|
||||
return self.page_count
|
||||
|
||||
def readCBI( self ):
|
||||
if self.cbi_md is None:
|
||||
raw_cbi = self.readRawCBI()
|
||||
if raw_cbi is None:
|
||||
self.cbi_md = GenericMetadata()
|
||||
else:
|
||||
self.cbi_md = ComicBookInfo().metadataFromString( raw_cbi )
|
||||
|
||||
self.cbi_md.setDefaultPageList( self.getNumberOfPages() )
|
||||
|
||||
return self.cbi_md
|
||||
|
||||
def readRawCBI( self ):
|
||||
if ( not self.hasCBI() ):
|
||||
return None
|
||||
|
||||
return self.archiver.getArchiveComment()
|
||||
|
||||
def hasCBI(self):
|
||||
if self.has_cbi is None:
|
||||
|
||||
#if ( not ( self.isZip() or self.isRar()) or not self.seemsToBeAComicArchive() ):
|
||||
if not self.seemsToBeAComicArchive():
|
||||
self.has_cbi = False
|
||||
else:
|
||||
comment = self.archiver.getArchiveComment()
|
||||
self.has_cbi = ComicBookInfo().validateString( comment )
|
||||
|
||||
return self.has_cbi
|
||||
|
||||
def writeCBI( self, metadata ):
|
||||
if metadata is not None:
|
||||
self.applyArchiveInfoToMetadata( metadata )
|
||||
cbi_string = ComicBookInfo().stringFromMetadata( metadata )
|
||||
write_success = self.archiver.setArchiveComment( cbi_string )
|
||||
if write_success:
|
||||
self.has_cbi = True
|
||||
self.cbi_md = metadata
|
||||
else:
|
||||
self.resetCache()
|
||||
return write_success
|
||||
else:
|
||||
return False
|
||||
|
||||
def removeCBI( self ):
|
||||
if self.hasCBI():
|
||||
write_success = self.archiver.setArchiveComment( "" )
|
||||
if write_success:
|
||||
self.has_cbi = False
|
||||
self.cbi_md = None
|
||||
else:
|
||||
self.resetCache()
|
||||
return write_success
|
||||
return True
|
||||
|
||||
def readCIX( self ):
|
||||
if self.cix_md is None:
|
||||
raw_cix = self.readRawCIX()
|
||||
if raw_cix is None:
|
||||
self.cix_md = GenericMetadata()
|
||||
else:
|
||||
self.cix_md = ComicInfoXml().metadataFromString( raw_cix )
|
||||
|
||||
#validate the existing page list (make sure count is correct)
|
||||
if len ( self.cix_md.pages ) != 0 :
|
||||
if len ( self.cix_md.pages ) != self.getNumberOfPages():
|
||||
# pages array doesn't match the actual number of images we're seeing
|
||||
# in the archive, so discard the data
|
||||
self.cix_md.pages = []
|
||||
|
||||
if len( self.cix_md.pages ) == 0:
|
||||
self.cix_md.setDefaultPageList( self.getNumberOfPages() )
|
||||
|
||||
return self.cix_md
|
||||
|
||||
def readRawCIX( self ):
|
||||
if not self.hasCIX():
|
||||
return None
|
||||
|
||||
return self.archiver.readArchiveFile( self.ci_xml_filename )
|
||||
|
||||
def writeCIX(self, metadata):
|
||||
|
||||
if metadata is not None:
|
||||
self.applyArchiveInfoToMetadata( metadata, calc_page_sizes=True )
|
||||
cix_string = ComicInfoXml().stringFromMetadata( metadata )
|
||||
write_success = self.archiver.writeArchiveFile( self.ci_xml_filename, cix_string )
|
||||
if write_success:
|
||||
self.has_cix = True
|
||||
self.cix_md = metadata
|
||||
else:
|
||||
self.resetCache()
|
||||
return write_success
|
||||
else:
|
||||
return False
|
||||
|
||||
def removeCIX( self ):
|
||||
if self.hasCIX():
|
||||
write_success = self.archiver.removeArchiveFile( self.ci_xml_filename )
|
||||
if write_success:
|
||||
self.has_cix = False
|
||||
self.cix_md = None
|
||||
else:
|
||||
self.resetCache()
|
||||
return write_success
|
||||
return True
|
||||
|
||||
|
||||
def hasCIX(self):
|
||||
if self.has_cix is None:
|
||||
|
||||
if not self.seemsToBeAComicArchive():
|
||||
self.has_cix = False
|
||||
elif self.ci_xml_filename in self.archiver.getArchiveFilenameList():
|
||||
self.has_cix = True
|
||||
else:
|
||||
self.has_cix = False
|
||||
return self.has_cix
|
||||
|
||||
|
||||
def readCoMet( self ):
|
||||
if self.comet_md is None:
|
||||
raw_comet = self.readRawCoMet()
|
||||
if raw_comet is None:
|
||||
self.comet_md = GenericMetadata()
|
||||
else:
|
||||
self.comet_md = CoMet().metadataFromString( raw_comet )
|
||||
|
||||
self.comet_md.setDefaultPageList( self.getNumberOfPages() )
|
||||
#use the coverImage value from the comet_data to mark the cover in this struct
|
||||
# walk through list of images in file, and find the matching one for md.coverImage
|
||||
# need to remove the existing one in the default
|
||||
if self.comet_md.coverImage is not None:
|
||||
cover_idx = 0
|
||||
for idx,f in enumerate(self.getPageNameList()):
|
||||
if self.comet_md.coverImage == f:
|
||||
cover_idx = idx
|
||||
break
|
||||
if cover_idx != 0:
|
||||
del (self.comet_md.pages[0]['Type'] )
|
||||
self.comet_md.pages[ cover_idx ]['Type'] = PageType.FrontCover
|
||||
|
||||
return self.comet_md
|
||||
|
||||
def readRawCoMet( self ):
|
||||
if not self.hasCoMet():
|
||||
print self.path, "doesn't have CoMet data!"
|
||||
return None
|
||||
|
||||
return self.archiver.readArchiveFile( self.comet_filename )
|
||||
|
||||
def writeCoMet(self, metadata):
|
||||
|
||||
if metadata is not None:
|
||||
if not self.hasCoMet():
|
||||
self.comet_filename = self.comet_default_filename
|
||||
|
||||
self.applyArchiveInfoToMetadata( metadata )
|
||||
# Set the coverImage value, if it's not the first page
|
||||
cover_idx = int(metadata.getCoverPageIndexList()[0])
|
||||
if cover_idx != 0:
|
||||
metadata.coverImage = self.getPageName( cover_idx )
|
||||
|
||||
comet_string = CoMet().stringFromMetadata( metadata )
|
||||
write_success = self.archiver.writeArchiveFile( self.comet_filename, comet_string )
|
||||
if write_success:
|
||||
self.has_comet = True
|
||||
self.comet_md = metadata
|
||||
else:
|
||||
self.resetCache()
|
||||
return write_success
|
||||
else:
|
||||
return False
|
||||
|
||||
def removeCoMet( self ):
|
||||
if self.hasCoMet():
|
||||
write_success = self.archiver.removeArchiveFile( self.comet_filename )
|
||||
if write_success:
|
||||
self.has_comet = False
|
||||
self.comet_md = None
|
||||
else:
|
||||
self.resetCache()
|
||||
return write_success
|
||||
return True
|
||||
|
||||
def hasCoMet(self):
|
||||
if self.has_comet is None:
|
||||
self.has_comet = False
|
||||
if not self.seemsToBeAComicArchive():
|
||||
return self.has_comet
|
||||
|
||||
#look at all xml files in root, and search for CoMet data, get first
|
||||
for n in self.archiver.getArchiveFilenameList():
|
||||
if ( os.path.dirname(n) == "" and
|
||||
os.path.splitext(n)[1].lower() == '.xml'):
|
||||
# read in XML file, and validate it
|
||||
data = self.archiver.readArchiveFile( n )
|
||||
if CoMet().validateString( data ):
|
||||
# since we found it, save it!
|
||||
self.comet_filename = n
|
||||
self.has_comet = True
|
||||
break
|
||||
|
||||
return self.has_comet
|
||||
|
||||
|
||||
|
||||
def applyArchiveInfoToMetadata( self, md, calc_page_sizes=False):
|
||||
md.pageCount = self.getNumberOfPages()
|
||||
|
||||
if calc_page_sizes:
|
||||
for p in md.pages:
|
||||
idx = int( p['Image'] )
|
||||
if pil_available:
|
||||
if 'ImageSize' not in p or 'ImageHeight' not in p or 'ImageWidth' not in p:
|
||||
data = self.getPage( idx )
|
||||
if data is not None:
|
||||
try:
|
||||
im = Image.open(StringIO.StringIO(data))
|
||||
w,h = im.size
|
||||
|
||||
p['ImageSize'] = str(len(data))
|
||||
p['ImageHeight'] = str(h)
|
||||
p['ImageWidth'] = str(w)
|
||||
except IOError:
|
||||
p['ImageSize'] = str(len(data))
|
||||
|
||||
else:
|
||||
if 'ImageSize' not in p:
|
||||
data = self.getPage( idx )
|
||||
p['ImageSize'] = str(len(data))
|
||||
|
||||
|
||||
|
||||
def metadataFromFilename( self ):
|
||||
|
||||
metadata = GenericMetadata()
|
||||
|
||||
fnp = FileNameParser()
|
||||
fnp.parseFilename( self.path )
|
||||
|
||||
if fnp.issue != "":
|
||||
metadata.issue = fnp.issue
|
||||
if fnp.series != "":
|
||||
metadata.series = fnp.series
|
||||
if fnp.volume != "":
|
||||
metadata.volume = fnp.volume
|
||||
if fnp.year != "":
|
||||
metadata.year = fnp.year
|
||||
if fnp.issue_count != "":
|
||||
metadata.issueCount = fnp.issue_count
|
||||
|
||||
metadata.isEmpty = False
|
||||
|
||||
return metadata
|
||||
|
||||
def exportAsZip( self, zipfilename ):
|
||||
if self.archive_type == self.ArchiveType.Zip:
|
||||
# nothing to do, we're already a zip
|
||||
return True
|
||||
|
||||
zip_archiver = ZipArchiver( zipfilename )
|
||||
return zip_archiver.copyFromArchive( self.archiver )
|
||||
|
||||
comicbookinfo.py (deleted, 152 lines)
@@ -1,152 +0,0 @@
"""
|
||||
A python class to encapsulate the ComicBookInfo data
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
|
||||
import json
|
||||
from datetime import datetime
|
||||
import zipfile
|
||||
|
||||
from genericmetadata import GenericMetadata
|
||||
import utils
|
||||
import ctversion
|
||||
|
||||
class ComicBookInfo:
|
||||
|
||||
|
||||
def metadataFromString( self, string ):
|
||||
|
||||
cbi_container = json.loads( unicode(string, 'utf-8') )
|
||||
|
||||
metadata = GenericMetadata()
|
||||
|
||||
cbi = cbi_container[ 'ComicBookInfo/1.0' ]
|
||||
|
||||
#helper func
|
||||
# If item is not in CBI, return None
|
||||
def xlate( cbi_entry):
|
||||
if cbi_entry in cbi:
|
||||
return cbi[cbi_entry]
|
||||
else:
|
||||
return None
|
||||
|
||||
metadata.series = xlate( 'series' )
|
||||
metadata.title = xlate( 'title' )
|
||||
metadata.issue = xlate( 'issue' )
|
||||
metadata.publisher = xlate( 'publisher' )
|
||||
metadata.month = xlate( 'publicationMonth' )
|
||||
metadata.year = xlate( 'publicationYear' )
|
||||
metadata.issueCount = xlate( 'numberOfIssues' )
|
||||
metadata.comments = xlate( 'comments' )
|
||||
metadata.credits = xlate( 'credits' )
|
||||
metadata.genre = xlate( 'genre' )
|
||||
metadata.volume = xlate( 'volume' )
|
||||
metadata.volumeCount = xlate( 'numberOfVolumes' )
|
||||
metadata.language = xlate( 'language' )
|
||||
metadata.country = xlate( 'country' )
|
||||
metadata.criticalRating = xlate( 'rating' )
|
||||
metadata.tags = xlate( 'tags' )
|
||||
|
||||
# make sure credits and tags are at least empty lists and not None
|
||||
if metadata.credits is None:
|
||||
metadata.credits = []
|
||||
if metadata.tags is None:
|
||||
metadata.tags = []
|
||||
|
||||
#need to massage the language string to be ISO
|
||||
if metadata.language is not None:
|
||||
# reverse look-up
|
||||
pattern = metadata.language
|
||||
metadata.language = None
|
||||
for key in utils.getLanguageDict():
|
||||
if utils.getLanguageDict()[ key ] == pattern.encode('utf-8'):
|
||||
metadata.language = key
|
||||
break
|
||||
|
||||
metadata.isEmpty = False
|
||||
|
||||
return metadata
|
||||
|
||||
def stringFromMetadata( self, metadata ):
|
||||
|
||||
cbi_container = self.createJSONDictionary( metadata )
|
||||
return json.dumps( cbi_container )
|
||||
|
||||
#verify that the string actually contains CBI data in JSON format
|
||||
def validateString( self, string ):
|
||||
|
||||
try:
|
||||
cbi_container = json.loads( string )
|
||||
except:
|
||||
return False
|
||||
|
||||
return ( 'ComicBookInfo/1.0' in cbi_container )
|
||||
|
||||
|
||||
def createJSONDictionary( self, metadata ):
|
||||
|
||||
# Create the dictionary that we will convert to JSON text
|
||||
cbi = dict()
|
||||
cbi_container = {'appID' : 'ComicTagger/' + ctversion.version,
|
||||
'lastModified' : str(datetime.now()),
|
||||
'ComicBookInfo/1.0' : cbi }
|
||||
|
||||
#helper func
|
||||
def assign( cbi_entry, md_entry):
|
||||
if md_entry is not None:
|
||||
cbi[cbi_entry] = md_entry
|
||||
|
||||
#helper func
|
||||
def toInt(s):
|
||||
i = None
|
||||
if type(s) == str or type(s) == int:
|
||||
try:
|
||||
i = int(s)
|
||||
except ValueError:
|
||||
pass
|
||||
return i
|
||||
|
||||
assign( 'series', metadata.series )
|
||||
assign( 'title', metadata.title )
|
||||
assign( 'issue', metadata.issue )
|
||||
assign( 'publisher', metadata.publisher )
|
||||
assign( 'publicationMonth', toInt(metadata.month) )
|
||||
assign( 'publicationYear', toInt(metadata.year) )
|
||||
assign( 'numberOfIssues', toInt(metadata.issueCount) )
|
||||
assign( 'comments', metadata.comments )
|
||||
assign( 'genre', metadata.genre )
|
||||
assign( 'volume', toInt(metadata.volume) )
|
||||
assign( 'numberOfVolumes', toInt(metadata.volumeCount) )
|
||||
assign( 'language', utils.getLanguageFromISO(metadata.language) )
|
||||
assign( 'country', metadata.country )
|
||||
assign( 'rating', metadata.criticalRating )
|
||||
assign( 'credits', metadata.credits )
|
||||
assign( 'tags', metadata.tags )
|
||||
|
||||
return cbi_container
|
||||
|
||||
|
||||
def writeToExternalFile( self, filename, metadata ):
|
||||
|
||||
cbi_container = self.createJSONDictionary(metadata)
|
||||
|
||||
f = open(filename, 'w')
|
||||
f.write(json.dumps(cbi_container, indent=4))
|
||||
f.close
|
||||
|
||||
comicinfoxml.py (deleted, 289 lines)
@@ -1,289 -0,0 @@
"""
|
||||
A python class to encapsulate ComicRack's ComicInfo.xml data
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
import zipfile
|
||||
from pprint import pprint
|
||||
import xml.etree.ElementTree as ET
|
||||
from genericmetadata import GenericMetadata
|
||||
import utils
|
||||
|
||||
class ComicInfoXml:
|
||||
|
||||
writer_synonyms = ['writer', 'plotter', 'scripter']
|
||||
penciller_synonyms = [ 'artist', 'penciller', 'penciler', 'breakdowns' ]
|
||||
inker_synonyms = [ 'inker', 'artist', 'finishes' ]
|
||||
colorist_synonyms = [ 'colorist', 'colourist', 'colorer', 'colourer' ]
|
||||
letterer_synonyms = [ 'letterer']
|
||||
cover_synonyms = [ 'cover', 'covers', 'coverartist', 'cover artist' ]
|
||||
editor_synonyms = [ 'editor']
|
||||
|
||||
|
||||
def getParseableCredits( self ):
|
||||
parsable_credits = []
|
||||
parsable_credits.extend( self.writer_synonyms )
|
||||
parsable_credits.extend( self.penciller_synonyms )
|
||||
parsable_credits.extend( self.inker_synonyms )
|
||||
parsable_credits.extend( self.colorist_synonyms )
|
||||
parsable_credits.extend( self.letterer_synonyms )
|
||||
parsable_credits.extend( self.cover_synonyms )
|
||||
parsable_credits.extend( self.editor_synonyms )
|
||||
return parsable_credits
|
||||
|
||||
def metadataFromString( self, string ):
|
||||
|
||||
tree = ET.ElementTree(ET.fromstring( string ))
|
||||
return self.convertXMLToMetadata( tree )
|
||||
|
||||
def stringFromMetadata( self, metadata ):
|
||||
|
||||
header = '<?xml version="1.0"?>\n'
|
||||
|
||||
tree = self.convertMetadataToXML( self, metadata )
|
||||
return header + ET.tostring(tree.getroot())
|
||||
|
||||
def indent( self, elem, level=0 ):
|
||||
# for making the XML output readable
|
||||
i = "\n" + level*" "
|
||||
if len(elem):
|
||||
if not elem.text or not elem.text.strip():
|
||||
elem.text = i + " "
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
for elem in elem:
|
||||
self.indent( elem, level+1 )
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
else:
|
||||
if level and (not elem.tail or not elem.tail.strip()):
|
||||
elem.tail = i
|
||||
|
||||
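The indent() helper above rewrites element text/tail whitespace in place so the serialized XML becomes readable. A minimal standalone sketch of the effect, using only the standard library (the ComicInfoXml call is shown as a comment and assumes this module is importable):

```python
import xml.etree.ElementTree as ET

root = ET.Element("ComicInfo")
ET.SubElement(root, "Series").text = "Example"

# without indentation the whole document serializes onto one line:
# <ComicInfo><Series>Example</Series></ComicInfo>
print(ET.tostring(root))

# ComicInfoXml().indent(root) would insert newline/space text so that
# ET.tostring(root) then emits one element per line.
```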
def convertMetadataToXML( self, filename, metadata ):
|
||||
|
||||
#shorthand for the metadata
|
||||
md = metadata
|
||||
|
||||
# build a tree structure
|
||||
root = ET.Element("ComicInfo")
|
||||
root.attrib['xmlns:xsi']="http://www.w3.org/2001/XMLSchema-instance"
|
||||
root.attrib['xmlns:xsd']="http://www.w3.org/2001/XMLSchema"
|
||||
#helper func
|
||||
def assign( cix_entry, md_entry):
|
||||
if md_entry is not None:
|
||||
ET.SubElement(root, cix_entry).text = u"{0}".format(md_entry)
|
||||
|
||||
assign( 'Series', md.series )
|
||||
assign( 'Number', md.issue )
|
||||
assign( 'Title', md.title )
|
||||
assign( 'Count', md.issueCount )
|
||||
assign( 'Volume', md.volume )
|
||||
assign( 'AlternateSeries', md.alternateSeries )
|
||||
assign( 'AlternateNumber', md.alternateNumber )
|
||||
assign( 'AlternateCount', md.alternateCount )
|
||||
assign( 'Summary', md.comments )
|
||||
assign( 'Notes', md.notes )
|
||||
assign( 'Year', md.year )
|
||||
assign( 'Month', md.month )
|
||||
assign( 'Publisher', md.publisher )
|
||||
assign( 'Imprint', md.imprint )
|
||||
assign( 'Genre', md.genre )
|
||||
assign( 'Web', md.webLink )
|
||||
assign( 'PageCount', md.pageCount )
|
||||
assign( 'Format', md.format )
|
||||
assign( 'LanguageISO', md.language )
|
||||
assign( 'Manga', md.manga )
|
||||
assign( 'Characters', md.characters )
|
||||
assign( 'Teams', md.teams )
|
||||
assign( 'Locations', md.locations )
|
||||
assign( 'ScanInformation', md.scanInfo )
|
||||
assign( 'StoryArc', md.storyArc )
|
||||
assign( 'SeriesGroup', md.seriesGroup )
|
||||
assign( 'AgeRating', md.maturityRating )
|
||||
|
||||
if md.blackAndWhite is not None and md.blackAndWhite:
|
||||
ET.SubElement(root, 'BlackAndWhite').text = "Yes"
|
||||
|
||||
# need to specially process the credits, since they are structured differently than CIX
|
||||
credit_writer_list = list()
|
||||
credit_penciller_list = list()
|
||||
credit_inker_list = list()
|
||||
credit_colorist_list = list()
|
||||
credit_letterer_list = list()
|
||||
credit_cover_list = list()
|
||||
credit_editor_list = list()
|
||||
|
||||
# first, loop thru credits, and build a list for each role that CIX supports
|
||||
for credit in metadata.credits:
|
||||
|
||||
if credit['role'].lower() in set( self.writer_synonyms ):
|
||||
credit_writer_list.append(credit['person'].replace(",",""))
|
||||
|
||||
if credit['role'].lower() in set( self.penciller_synonyms ):
|
||||
credit_penciller_list.append(credit['person'].replace(",",""))
|
||||
|
||||
if credit['role'].lower() in set( self.inker_synonyms ):
|
||||
credit_inker_list.append(credit['person'].replace(",",""))
|
||||
|
||||
if credit['role'].lower() in set( self.colorist_synonyms ):
|
||||
credit_colorist_list.append(credit['person'].replace(",",""))
|
||||
|
||||
if credit['role'].lower() in set( self.letterer_synonyms ):
|
||||
credit_letterer_list.append(credit['person'].replace(",",""))
|
||||
|
||||
if credit['role'].lower() in set( self.cover_synonyms ):
|
||||
credit_cover_list.append(credit['person'].replace(",",""))
|
||||
|
||||
if credit['role'].lower() in set( self.editor_synonyms ):
|
||||
credit_editor_list.append(credit['person'].replace(",",""))
|
||||
|
||||
# second, convert each list to string, and add to XML struct
|
||||
if len( credit_writer_list ) > 0:
|
||||
node = ET.SubElement(root, 'Writer')
|
||||
node.text = utils.listToString( credit_writer_list )
|
||||
|
||||
if len( credit_penciller_list ) > 0:
|
||||
node = ET.SubElement(root, 'Penciller')
|
||||
node.text = utils.listToString( credit_penciller_list )
|
||||
|
||||
if len( credit_inker_list ) > 0:
|
||||
node = ET.SubElement(root, 'Inker')
|
||||
node.text = utils.listToString( credit_inker_list )
|
||||
|
||||
if len( credit_colorist_list ) > 0:
|
||||
node = ET.SubElement(root, 'Colorist')
|
||||
node.text = utils.listToString( credit_colorist_list )
|
||||
|
||||
if len( credit_letterer_list ) > 0:
|
||||
node = ET.SubElement(root, 'Letterer')
|
||||
node.text = utils.listToString( credit_letterer_list )
|
||||
|
||||
if len( credit_cover_list ) > 0:
|
||||
node = ET.SubElement(root, 'CoverArtist')
|
||||
node.text = utils.listToString( credit_cover_list )
|
||||
|
||||
if len( credit_editor_list ) > 0:
|
||||
node = ET.SubElement(root, 'Editor')
|
||||
node.text = utils.listToString( credit_editor_list )
|
||||
|
||||
# loop and add the page entries under pages node
|
||||
if len( md.pages ) > 0:
|
||||
pages_node = ET.SubElement(root, 'Pages')
|
||||
for page_dict in md.pages:
|
||||
page_node = ET.SubElement(pages_node, 'Page')
|
||||
page_node.attrib = page_dict
|
||||
|
||||
# pretty-print the XML in place
|
||||
self.indent(root)
|
||||
|
||||
# wrap it in an ElementTree instance, and save as XML
|
||||
tree = ET.ElementTree(root)
|
||||
return tree
|
||||
|
||||
|
||||
def convertXMLToMetadata( self, tree ):
|
||||
|
||||
root = tree.getroot()
|
||||
|
||||
if root.tag != 'ComicInfo':
|
||||
raise ValueError("Root element is not ComicInfo")
|
||||
|
||||
metadata = GenericMetadata()
|
||||
md = metadata
|
||||
|
||||
|
||||
# Helper function
|
||||
def xlate( tag ):
|
||||
node = root.find( tag )
|
||||
if node is not None:
|
||||
return node.text
|
||||
else:
|
||||
return None
|
||||
|
||||
md.series = xlate( 'Series' )
|
||||
md.title = xlate( 'Title' )
|
||||
md.issue = xlate( 'Number' )
|
||||
md.issueCount = xlate( 'Count' )
|
||||
md.volume = xlate( 'Volume' )
|
||||
md.alternateSeries = xlate( 'AlternateSeries' )
|
||||
md.alternateNumber = xlate( 'AlternateNumber' )
|
||||
md.alternateCount = xlate( 'AlternateCount' )
|
||||
md.comments = xlate( 'Summary' )
|
||||
md.notes = xlate( 'Notes' )
|
||||
md.year = xlate( 'Year' )
|
||||
md.month = xlate( 'Month' )
|
||||
md.publisher = xlate( 'Publisher' )
|
||||
md.imprint = xlate( 'Imprint' )
|
||||
md.genre = xlate( 'Genre' )
|
||||
md.webLink = xlate( 'Web' )
|
||||
md.language = xlate( 'LanguageISO' )
|
||||
md.format = xlate( 'Format' )
|
||||
md.manga = xlate( 'Manga' )
|
||||
md.characters = xlate( 'Characters' )
|
||||
md.teams = xlate( 'Teams' )
|
||||
md.locations = xlate( 'Locations' )
|
||||
md.pageCount = xlate( 'PageCount' )
|
||||
md.scanInfo = xlate( 'ScanInformation' )
|
||||
md.storyArc = xlate( 'StoryArc' )
|
||||
md.seriesGroup = xlate( 'SeriesGroup' )
|
||||
md.maturityRating = xlate( 'AgeRating' )
|
||||
|
||||
tmp = xlate( 'BlackAndWhite' )
|
||||
md.blackAndWhite = False
|
||||
if tmp is not None and tmp.lower() in [ "yes", "true", "1" ]:
|
||||
md.blackAndWhite = True
|
||||
# Now extract the credit info
|
||||
for n in root:
|
||||
if ( n.tag == 'Writer' or
|
||||
n.tag == 'Penciller' or
|
||||
n.tag == 'Inker' or
|
||||
n.tag == 'Colorist' or
|
||||
n.tag == 'Letterer' or
|
||||
n.tag == 'Editor'
|
||||
):
|
||||
for name in n.text.split(','):
|
||||
metadata.addCredit( name.strip(), n.tag )
|
||||
|
||||
if n.tag == 'CoverArtist':
|
||||
for name in n.text.split(','):
|
||||
metadata.addCredit( name.strip(), "Cover" )
|
||||
|
||||
# parse page data now
|
||||
pages_node = root.find( "Pages" )
|
||||
if pages_node is not None:
|
||||
for page in pages_node:
|
||||
metadata.pages.append( page.attrib )
|
||||
#print page.attrib
|
||||
|
||||
metadata.isEmpty = False
|
||||
|
||||
return metadata
|
||||
|
||||
def writeToExternalFile( self, filename, metadata ):
|
||||
|
||||
tree = self.convertMetadataToXML( self, metadata )
|
||||
#ET.dump(tree)
|
||||
tree.write(filename, encoding='utf-8')
|
||||
|
||||
def readFromExternalFile( self, filename ):
|
||||
|
||||
tree = ET.parse( filename )
|
||||
return self.convertXMLToMetadata( tree )
|
||||
|
||||
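For orientation, a hypothetical round-trip through the class removed above (assuming comicinfoxml.py and genericmetadata.py are importable as in this tree, under the Python 2 environment this code targeted):

```python
from comicinfoxml import ComicInfoXml
from genericmetadata import GenericMetadata

md = GenericMetadata()
md.series = "Example Series"
md.issue = "1"
md.addCredit("Jane Doe", "Writer")

cix = ComicInfoXml()
xml_text = cix.stringFromMetadata(md)   # GenericMetadata -> ComicInfo XML string
md2 = cix.metadataFromString(xml_text)  # and back to GenericMetadata
assert md2.series == "Example Series"
```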
506
comictagger.py
@@ -1,506 +1,12 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/env python3
|
||||
import localefix
|
||||
from comictaggerlib.main import ctmain
|
||||
|
||||
"""
|
||||
A python script to tag comic archives
|
||||
"""
|
||||
|
||||
"""
|
||||
Copyright 2012 Anthony Beville
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import signal
|
||||
import os
|
||||
import traceback
|
||||
import time
|
||||
from pprint import pprint
|
||||
import json
|
||||
import platform
|
||||
import locale
|
||||
|
||||
filename_encoding = sys.getfilesystemencoding()
|
||||
|
||||
try:
|
||||
qt_available = True
|
||||
from PyQt4 import QtCore, QtGui
|
||||
from taggerwindow import TaggerWindow
|
||||
except ImportError as e:
|
||||
qt_available = False
|
||||
|
||||
|
||||
from settings import ComicTaggerSettings
|
||||
from options import Options, MetaDataStyle
|
||||
from comicarchive import ComicArchive
|
||||
from issueidentifier import IssueIdentifier
|
||||
from genericmetadata import GenericMetadata
|
||||
from comicvinetalker import ComicVineTalker, ComicVineTalkerException
|
||||
from filerenamer import FileRenamer
|
||||
from cbltransformer import CBLTransformer
|
||||
|
||||
import utils
|
||||
import codecs
|
||||
|
||||
class MultipleMatch():
|
||||
def __init__( self, filename, match_list):
|
||||
self.filename = filename
|
||||
self.matches = match_list
|
||||
|
||||
class OnlineMatchResults():
|
||||
def __init__(self):
|
||||
self.goodMatches = []
|
||||
self.noMatches = []
|
||||
self.multipleMatches = []
|
||||
self.writeFailures = []
|
||||
|
||||
#-----------------------------
|
||||
|
||||
def actual_issue_data_fetch( match, settings ):
|
||||
|
||||
# now get the particular issue data
|
||||
try:
|
||||
cv_md = ComicVineTalker().fetchIssueData( match['volume_id'], match['issue_number'], settings )
|
||||
except ComicVineTalkerException:
|
||||
print "Network error while getting issue details. Save aborted"
|
||||
return None
|
||||
|
||||
if settings.apply_cbl_transform_on_cv_import:
|
||||
cv_md = CBLTransformer( cv_md, settings ).apply()
|
||||
|
||||
return cv_md
|
||||
|
||||
def actual_metadata_save( ca, opts, md ):
|
||||
|
||||
if not opts.dryrun:
|
||||
# write out the new data
|
||||
if not ca.writeMetadata( md, opts.data_style ):
|
||||
print "The tag save seemed to fail!"
|
||||
return False
|
||||
else:
|
||||
print "Save complete."
|
||||
else:
|
||||
if opts.terse:
|
||||
print "dry-run option was set, so nothing was written"
|
||||
else:
|
||||
print "dry-run option was set, so nothing was written, but here is the final set of tags:"
|
||||
print u"{0}".format(md)
|
||||
return True
|
||||
|
||||
|
||||
def post_process_matches( match_results, opts, settings ):
|
||||
# now go through the match results
|
||||
if opts.show_save_summary:
|
||||
if len( match_results.goodMatches ) > 0:
|
||||
print "\nSuccessful matches:"
|
||||
print "------------------"
|
||||
for f in match_results.goodMatches:
|
||||
print f
|
||||
|
||||
if len( match_results.noMatches ) > 0:
|
||||
print "\nNo matches:"
|
||||
print "------------------"
|
||||
for f in match_results.noMatches:
|
||||
print f
|
||||
|
||||
if len( match_results.writeFailures ) > 0:
|
||||
print "\nFile Write Failures:"
|
||||
print "------------------"
|
||||
for f in match_results.writeFailures:
|
||||
print f
|
||||
|
||||
if not opts.show_save_summary and not opts.interactive:
|
||||
#just quit if we're not interactive or showing the summary
|
||||
return
|
||||
|
||||
if len( match_results.multipleMatches ) > 0:
|
||||
print "\nMultiple matches:"
|
||||
print "------------------"
|
||||
for mm in match_results.multipleMatches:
|
||||
print mm.filename
|
||||
for (counter,m) in enumerate(mm.matches):
|
||||
print u" {0}. {1} #{2} [{3}] ({4}/{5}) - {6}".format(counter,
|
||||
m['series'],
|
||||
m['issue_number'],
|
||||
m['publisher'],
|
||||
m['month'],
|
||||
m['year'],
|
||||
m['issue_title'])
|
||||
if opts.interactive:
|
||||
while True:
|
||||
i = raw_input("Choose a match #, or 's' to skip: ")
|
||||
if (i.isdigit() and int(i) in range(len(mm.matches))) or i == 's':
|
||||
break
|
||||
if i != 's':
|
||||
# save the data!
|
||||
# we know at this point, that the file is all good to go
|
||||
ca = ComicArchive( mm.filename )
|
||||
md = create_local_metadata( opts, ca, ca.hasMetadata(opts.data_style) )
|
||||
cv_md = actual_issue_data_fetch(mm.matches[int(i)], settings)
|
||||
md.overlay( cv_md )
|
||||
actual_metadata_save( ca, opts, md )
|
||||
|
||||
|
||||
print
|
||||
|
||||
|
||||
def cli_mode( opts, settings ):
|
||||
if len( opts.file_list ) < 1:
|
||||
print "You must specify at least one filename. Use the -h option for more info"
|
||||
return
|
||||
|
||||
match_results = OnlineMatchResults()
|
||||
|
||||
for f in opts.file_list:
|
||||
f = f.decode(filename_encoding, 'replace')
|
||||
process_file_cli( f, opts, settings, match_results )
|
||||
sys.stdout.flush()
|
||||
|
||||
post_process_matches( match_results, opts, settings )
|
||||
|
||||
|
||||
def create_local_metadata( opts, ca, has_desired_tags ):
|
||||
|
||||
md = GenericMetadata()
|
||||
md.setDefaultPageList( ca.getNumberOfPages() )
|
||||
|
||||
if has_desired_tags:
|
||||
md = ca.readMetadata( opts.data_style )
|
||||
|
||||
# now, overlay the parsed filename info
|
||||
if opts.parse_filename:
|
||||
md.overlay( ca.metadataFromFilename() )
|
||||
|
||||
# finally, use explicit stuff
|
||||
if opts.metadata is not None:
|
||||
md.overlay( opts.metadata )
|
||||
|
||||
return md
|
||||
|
||||
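create_local_metadata layers the embedded tags, the parsed filename, and any explicit command-line metadata through GenericMetadata.overlay(); a hypothetical illustration of that layering (assuming overlay copies every field of its argument that is set, as the calls above rely on):

```python
from genericmetadata import GenericMetadata

tags_md = GenericMetadata()
tags_md.series = "From embedded tags"
tags_md.issue = "3"

filename_md = GenericMetadata()
filename_md.series = "From filename"   # only fields that are set get copied over

tags_md.overlay(filename_md)
assert tags_md.series == "From filename"  # the later layer wins
assert tags_md.issue == "3"               # untouched where the later layer has nothing
```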
def process_file_cli( filename, opts, settings, match_results ):
|
||||
|
||||
batch_mode = len( opts.file_list ) > 1
|
||||
|
||||
ca = ComicArchive(filename)
|
||||
if settings.rar_exe_path != "":
|
||||
ca.setExternalRarProgram( settings.rar_exe_path )
|
||||
|
||||
if not ca.seemsToBeAComicArchive():
|
||||
print "Sorry, but "+ filename + " is not a comic archive!"
|
||||
return
|
||||
|
||||
#if not ca.isWritableForStyle( opts.data_style ) and ( opts.delete_tags or opts.save_tags or opts.rename_file ):
|
||||
if not ca.isWritable( ) and ( opts.delete_tags or opts.copy_tags or opts.save_tags or opts.rename_file ):
|
||||
print "This archive is not writable for that tag type"
|
||||
return
|
||||
|
||||
has = [ False, False, False ]
|
||||
if ca.hasCIX(): has[ MetaDataStyle.CIX ] = True
|
||||
if ca.hasCBI(): has[ MetaDataStyle.CBI ] = True
|
||||
if ca.hasCoMet(): has[ MetaDataStyle.COMET ] = True
|
||||
|
||||
if opts.print_tags:
|
||||
|
||||
|
||||
if opts.data_style is None:
|
||||
page_count = ca.getNumberOfPages()
|
||||
|
||||
brief = ""
|
||||
|
||||
if batch_mode:
|
||||
brief = "{0}: ".format(filename)
|
||||
|
||||
if ca.isZip(): brief += "ZIP archive "
|
||||
elif ca.isRar(): brief += "RAR archive "
|
||||
elif ca.isFolder(): brief += "Folder archive "
|
||||
|
||||
brief += "({0: >3} pages)".format(page_count)
|
||||
brief += " tags:[ "
|
||||
|
||||
if not ( has[ MetaDataStyle.CBI ] or has[ MetaDataStyle.CIX ] or has[ MetaDataStyle.COMET ] ):
|
||||
brief += "none "
|
||||
else:
|
||||
if has[ MetaDataStyle.CBI ]: brief += "CBL "
|
||||
if has[ MetaDataStyle.CIX ]: brief += "CR "
|
||||
if has[ MetaDataStyle.COMET ]: brief += "CoMet "
|
||||
brief += "]"
|
||||
|
||||
print brief
|
||||
|
||||
if opts.terse:
|
||||
return
|
||||
|
||||
print
|
||||
|
||||
if opts.data_style is None or opts.data_style == MetaDataStyle.CIX:
|
||||
if has[ MetaDataStyle.CIX ]:
|
||||
print "------ComicRack tags--------"
|
||||
if opts.raw:
|
||||
print u"{0}".format(unicode(ca.readRawCIX(), errors='ignore'))
|
||||
else:
|
||||
print u"{0}".format(ca.readCIX())
|
||||
|
||||
if opts.data_style is None or opts.data_style == MetaDataStyle.CBI:
|
||||
if has[ MetaDataStyle.CBI ]:
|
||||
print "------ComicBookLover tags--------"
|
||||
if opts.raw:
|
||||
pprint(json.loads(ca.readRawCBI()))
|
||||
else:
|
||||
print u"{0}".format(ca.readCBI())
|
||||
|
||||
if opts.data_style is None or opts.data_style == MetaDataStyle.COMET:
|
||||
if has[ MetaDataStyle.COMET ]:
|
||||
print "------CoMet tags--------"
|
||||
if opts.raw:
|
||||
print u"{0}".format(ca.readRawCoMet())
|
||||
else:
|
||||
print u"{0}".format(ca.readCoMet())
|
||||
|
||||
|
||||
elif opts.delete_tags:
|
||||
style_name = MetaDataStyle.name[ opts.data_style ]
|
||||
if has[ opts.data_style ]:
|
||||
if not opts.dryrun:
|
||||
if not ca.removeMetadata( opts.data_style ):
|
||||
print "{0}: Tag removal seemed to fail!".format( filename )
|
||||
else:
|
||||
print "{0}: Removed {1} tags.".format( filename, style_name )
|
||||
else:
|
||||
print "{0}: dry-run. {1} tags not removed".format( filename, style_name )
|
||||
else:
|
||||
print "{0}: This archive doesn't have {1} tags to remove.".format( filename, style_name )
|
||||
|
||||
elif opts.copy_tags:
|
||||
dst_style_name = MetaDataStyle.name[ opts.data_style ]
|
||||
if opts.no_overwrite and has[ opts.data_style ]:
|
||||
print "{0}: Already has {1} tags. Not overwriting.".format(filename, dst_style_name)
|
||||
return
|
||||
if opts.copy_source == opts.data_style:
|
||||
print "{0}: Destination and source are same: {1}. Nothing to do.".format(filename, dst_style_name)
|
||||
return
|
||||
|
||||
src_style_name = MetaDataStyle.name[ opts.copy_source ]
|
||||
if has[ opts.copy_source ]:
|
||||
if not opts.dryrun:
|
||||
md = ca.readMetadata( opts.copy_source )
|
||||
|
||||
if settings.apply_cbl_transform_on_bulk_operation and opts.data_style == MetaDataStyle.CBI:
|
||||
md = CBLTransformer( md, settings ).apply()
|
||||
|
||||
if not ca.writeMetadata( md, opts.data_style ):
|
||||
print u"{0}: Tag copy seemed to fail!".format( filename )
|
||||
else:
|
||||
print u"{0}: Copied {1} tags to {2} .".format( filename, src_style_name, dst_style_name )
|
||||
else:
|
||||
print u"{0}: dry-run. {1} tags not copied".format( filename, src_style_name )
|
||||
else:
|
||||
print u"{0}: This archive doesn't have {1} tags to copy.".format( filename, src_style_name )
|
||||
|
||||
|
||||
elif opts.save_tags:
|
||||
|
||||
if opts.no_overwrite and has[ opts.data_style ]:
|
||||
print u"{0}: Already has {1} tags. Not overwriting.".format(filename, MetaDataStyle.name[ opts.data_style ])
|
||||
return
|
||||
|
||||
if batch_mode:
|
||||
print u"Processing {0}: ".format(filename)
|
||||
|
||||
md = create_local_metadata( opts, ca, has[ opts.data_style ] )
|
||||
|
||||
# now, search online
|
||||
if opts.search_online:
|
||||
if opts.issue_id is not None:
|
||||
# we were given the actual ID to search with
|
||||
try:
|
||||
cv_md = ComicVineTalker().fetchIssueDataByIssueID( opts.issue_id, settings )
|
||||
except ComicVineTalkerException:
|
||||
print "Network error while getting issue details. Save aborted"
|
||||
return None
|
||||
|
||||
if cv_md is None:
|
||||
print "No match for ID {0} was found.".format(opts.issue_id)
|
||||
return None
|
||||
|
||||
if settings.apply_cbl_transform_on_cv_import:
|
||||
cv_md = CBLTransformer( cv_md, settings ).apply()
|
||||
else:
|
||||
ii = IssueIdentifier( ca, settings )
|
||||
|
||||
if md is None or md.isEmpty:
|
||||
print "No metadata given to search online with!"
|
||||
return
|
||||
|
||||
def myoutput( text ):
|
||||
if opts.verbose:
|
||||
IssueIdentifier.defaultWriteOutput( text )
|
||||
|
||||
# use our overlayed MD struct to search
|
||||
ii.setAdditionalMetadata( md )
|
||||
ii.onlyUseAdditionalMetaData = True
|
||||
ii.setOutputFunction( myoutput )
|
||||
ii.cover_page_index = md.getCoverPageIndexList()[0]
|
||||
matches = ii.search()
|
||||
|
||||
result = ii.search_result
|
||||
|
||||
found_match = False
|
||||
choices = False
|
||||
low_confidence = False
|
||||
|
||||
if result == ii.ResultNoMatches:
|
||||
pass
|
||||
elif result == ii.ResultFoundMatchButBadCoverScore:
|
||||
low_confidence = True
|
||||
found_match = True
|
||||
elif result == ii.ResultFoundMatchButNotFirstPage :
|
||||
found_match = True
|
||||
elif result == ii.ResultMultipleMatchesWithBadImageScores:
|
||||
low_confidence = True
|
||||
choices = True
|
||||
elif result == ii.ResultOneGoodMatch:
|
||||
found_match = True
|
||||
elif result == ii.ResultMultipleGoodMatches:
|
||||
choices = True
|
||||
|
||||
if choices:
|
||||
print "Online search: Multiple matches. Save aborted"
|
||||
match_results.multipleMatches.append(MultipleMatch(filename,matches))
|
||||
return
|
||||
if low_confidence and opts.abortOnLowConfidence:
|
||||
print "Online search: Low confidence match. Save aborted"
|
||||
match_results.noMatches.append(filename)
|
||||
return
|
||||
if not found_match:
|
||||
print "Online search: No match found. Save aborted"
|
||||
match_results.noMatches.append(filename)
|
||||
return
|
||||
|
||||
|
||||
# we got here, so we have a single match
|
||||
|
||||
# now get the particular issue data
|
||||
cv_md = actual_issue_data_fetch(matches[0], settings)
|
||||
if cv_md is None:
|
||||
return
|
||||
|
||||
md.overlay( cv_md )
|
||||
|
||||
# ok, done building our metadata. time to save
|
||||
if not actual_metadata_save( ca, opts, md ):
|
||||
match_results.writeFailures.append(filename)
|
||||
else:
|
||||
match_results.goodMatches.append(filename)
|
||||
|
||||
elif opts.rename_file:
|
||||
|
||||
msg_hdr = ""
|
||||
if batch_mode:
|
||||
msg_hdr = u"{0}: ".format(filename)
|
||||
|
||||
if opts.data_style is not None:
|
||||
use_tags = has[ opts.data_style ]
|
||||
else:
|
||||
use_tags = False
|
||||
|
||||
md = create_local_metadata( opts, ca, use_tags )
|
||||
|
||||
if md.series is None:
|
||||
print msg_hdr + "Can't rename without series name"
|
||||
return
|
||||
|
||||
new_ext = None # default
|
||||
if settings.rename_extension_based_on_archive:
|
||||
if ca.isZip():
|
||||
new_ext = ".cbz"
|
||||
elif ca.isRar():
|
||||
new_ext = ".cbr"
|
||||
|
||||
renamer = FileRenamer( md )
|
||||
renamer.setTemplate( settings.rename_template )
|
||||
renamer.setIssueZeroPadding( settings.rename_issue_number_padding )
|
||||
renamer.setSmartCleanup( settings.rename_use_smart_string_cleanup )
|
||||
|
||||
new_name = renamer.determineName( filename, ext=new_ext )
|
||||
|
||||
if new_name == os.path.basename(filename):
|
||||
print msg_hdr + "Filename is already good!"
|
||||
return
|
||||
|
||||
folder = os.path.dirname( os.path.abspath( filename ) )
|
||||
new_abs_path = utils.unique_file( os.path.join( folder, new_name ) )
|
||||
|
||||
suffix = ""
|
||||
if not opts.dryrun:
|
||||
# rename the file
|
||||
os.rename( filename, new_abs_path )
|
||||
else:
|
||||
suffix = " (dry-run, no change)"
|
||||
|
||||
print u"renamed '{0}' -> '{1}' {2}".format(os.path.basename(filename), new_name, suffix)
|
||||
|
||||
|
||||
|
||||
|
||||
#-----------------------------
|
||||
|
||||
def main():
|
||||
|
||||
# try to make stdout encodings happy for unicode
|
||||
sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
|
||||
|
||||
opts = Options()
|
||||
opts.parseCmdLineArgs()
|
||||
|
||||
settings = ComicTaggerSettings()
|
||||
# make sure unrar program is in the path for the UnRAR class
|
||||
utils.addtopath(os.path.dirname(settings.unrar_exe_path))
|
||||
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
|
||||
if not qt_available and not opts.no_gui:
|
||||
opts.no_gui = True
|
||||
print "QT is not available."
|
||||
|
||||
if opts.no_gui:
|
||||
cli_mode( opts, settings )
|
||||
|
||||
else:
|
||||
|
||||
app = QtGui.QApplication(sys.argv)
|
||||
|
||||
if platform.system() != "Linux":
|
||||
img = QtGui.QPixmap(os.path.join(ComicTaggerSettings.baseDir(), 'graphics/tags.png' ))
|
||||
splash = QtGui.QSplashScreen(img)
|
||||
splash.show()
|
||||
splash.raise_()
|
||||
app.processEvents()
|
||||
|
||||
try:
|
||||
tagger_window = TaggerWindow( opts.file_list, settings )
|
||||
tagger_window.show()
|
||||
|
||||
if platform.system() != "Linux":
|
||||
splash.finish( tagger_window )
|
||||
|
||||
sys.exit(app.exec_())
|
||||
except Exception, e:
|
||||
QtGui.QMessageBox.critical(QtGui.QMainWindow(), "Error", "Unhandled exception in app:\n" + traceback.format_exc() )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
|
||||
|
||||
|
||||
localefix.configure_locale()
|
||||
ctmain()
|
||||
|
||||
51
comictagger.spec
Normal file
@@ -0,0 +1,51 @@
|
||||
# -*- mode: python -*-
|
||||
|
||||
import platform
|
||||
from os.path import join
|
||||
from comictaggerlib import ctversion
|
||||
from PyInstaller.utils.hooks import get_module_file_attribute
|
||||
|
||||
enable_console = False
|
||||
binaries = []
|
||||
block_cipher = None
|
||||
|
||||
if platform.system() == "Windows":
|
||||
enable_console = True
|
||||
|
||||
a = Analysis(['comictagger.py'],
|
||||
binaries=binaries,
|
||||
datas=[('comictaggerlib/ui/*.ui', 'ui'), ('comictaggerlib/graphics', 'graphics'), ('comicapi/data', 'comicapi/data'),(os.path.join(os.path.dirname(get_module_file_attribute('wordninja')),"wordninja"), "wordninja")],
|
||||
hiddenimports=['PIL'],
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher)
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
# single file setup
|
||||
exclude_binaries=False,
|
||||
name='comictagger',
|
||||
debug=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
console=enable_console,
|
||||
icon="windows/app.ico" )
|
||||
|
||||
app = BUNDLE(exe,
|
||||
name='ComicTagger.app',
|
||||
icon='mac/app.icns',
|
||||
info_plist={
|
||||
'NSHighResolutionCapable': 'True',
|
||||
'NSRequiresAquaSystemAppearance': 'False',
|
||||
'CFBundleDisplayName': 'ComicTagger',
|
||||
'CFBundleShortVersionString': ctversion.version,
|
||||
'CFBundleVersion': ctversion.version
|
||||
},
|
||||
bundle_identifier=None)
|
||||
0
comictaggerlib/__init__.py
Normal file
259
comictaggerlib/autotagmatchwindow.py
Normal file
@@ -0,0 +1,259 @@
|
||||
"""A PyQT4 dialog to select from automated issue matches"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Callable, List
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comicapi.comicarchive import MetaDataStyle
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comictaggerlib.coverimagewidget import CoverImageWidget
|
||||
from comictaggerlib.resulttypes import IssueResult, MultipleMatch
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AutoTagMatchWindow(QtWidgets.QDialog):
|
||||
volume_id = 0
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
parent: QtWidgets.QWidget,
|
||||
match_set_list: List[MultipleMatch],
|
||||
style: int,
|
||||
fetch_func: Callable[[IssueResult], GenericMetadata],
|
||||
settings: ComicTaggerSettings,
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("matchselectionwindow.ui"), self)
|
||||
|
||||
self.settings = settings
|
||||
|
||||
self.current_match_set: MultipleMatch = match_set_list[0]
|
||||
|
||||
self.altCoverWidget = CoverImageWidget(self.altCoverContainer, CoverImageWidget.AltCoverMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.altCoverContainer)
|
||||
gridlayout.addWidget(self.altCoverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
self.archiveCoverWidget = CoverImageWidget(self.archiveCoverContainer, CoverImageWidget.ArchiveMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.archiveCoverContainer)
|
||||
gridlayout.addWidget(self.archiveCoverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
reduce_widget_font_size(self.twList)
|
||||
reduce_widget_font_size(self.teDescription, 1)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
self.skipButton = QtWidgets.QPushButton("Skip to Next")
|
||||
self.buttonBox.addButton(self.skipButton, QtWidgets.QDialogButtonBox.ButtonRole.ActionRole)
|
||||
self.buttonBox.button(QtWidgets.QDialogButtonBox.StandardButton.Ok).setText("Accept and Write Tags")
|
||||
|
||||
self.match_set_list = match_set_list
|
||||
self._style = style
|
||||
self.fetch_func = fetch_func
|
||||
|
||||
self.current_match_set_idx = 0
|
||||
|
||||
self.twList.currentItemChanged.connect(self.current_item_changed)
|
||||
self.twList.cellDoubleClicked.connect(self.cell_double_clicked)
|
||||
self.skipButton.clicked.connect(self.skip_to_next)
|
||||
|
||||
self.update_data()
|
||||
|
||||
def update_data(self) -> None:
|
||||
|
||||
self.current_match_set = self.match_set_list[self.current_match_set_idx]
|
||||
|
||||
if self.current_match_set_idx + 1 == len(self.match_set_list):
|
||||
self.buttonBox.button(QtWidgets.QDialogButtonBox.StandardButton.Cancel).setDisabled(True)
|
||||
self.skipButton.setText("Skip")
|
||||
|
||||
self.set_cover_image()
|
||||
self.populate_table()
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.selectRow(0)
|
||||
|
||||
path = self.current_match_set.ca.path
|
||||
self.setWindowTitle(
|
||||
"Select correct match or skip ({} of {}): {}".format(
|
||||
self.current_match_set_idx + 1,
|
||||
len(self.match_set_list),
|
||||
os.path.split(path)[1],
|
||||
)
|
||||
)
|
||||
|
||||
def populate_table(self) -> None:
|
||||
if not self.current_match_set:
|
||||
return
|
||||
|
||||
while self.twList.rowCount() > 0:
|
||||
self.twList.removeRow(0)
|
||||
|
||||
self.twList.setSortingEnabled(False)
|
||||
|
||||
row = 0
|
||||
for match in self.current_match_set.matches:
|
||||
self.twList.insertRow(row)
|
||||
|
||||
item_text = match["series"]
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.UserRole, (match,))
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 0, item)
|
||||
|
||||
if match["publisher"] is not None:
|
||||
item_text = str(match["publisher"])
|
||||
else:
|
||||
item_text = "Unknown"
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 1, item)
|
||||
|
||||
month_str = ""
|
||||
year_str = "????"
|
||||
if match["month"] is not None:
|
||||
month_str = f"-{int(match['month']):02d}"
|
||||
if match["year"] is not None:
|
||||
year_str = str(match["year"])
|
||||
|
||||
item_text = year_str + month_str
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 2, item)
|
||||
|
||||
item_text = match["issue_title"]
|
||||
if item_text is None:
|
||||
item_text = ""
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 3, item)
|
||||
|
||||
row += 1
|
||||
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.setSortingEnabled(True)
|
||||
self.twList.sortItems(2, QtCore.Qt.SortOrder.AscendingOrder)
|
||||
self.twList.selectRow(0)
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.horizontalHeader().setStretchLastSection(True)
|
||||
|
||||
def cell_double_clicked(self, r: int, c: int) -> None:
|
||||
self.accept()
|
||||
|
||||
def current_item_changed(self, curr: QtCore.QModelIndex, prev: QtCore.QModelIndex) -> None:
|
||||
|
||||
if curr is None:
|
||||
return None
|
||||
if prev is not None and prev.row() == curr.row():
|
||||
return None
|
||||
|
||||
self.altCoverWidget.set_issue_id(self.current_match()["issue_id"])
|
||||
if self.current_match()["description"] is None:
|
||||
self.teDescription.setText("")
|
||||
else:
|
||||
self.teDescription.setText(self.current_match()["description"])
|
||||
|
||||
def set_cover_image(self) -> None:
|
||||
ca = self.current_match_set.ca
|
||||
self.archiveCoverWidget.set_archive(ca)
|
||||
|
||||
def current_match(self) -> IssueResult:
|
||||
row = self.twList.currentRow()
|
||||
match: IssueResult = self.twList.item(row, 0).data(QtCore.Qt.ItemDataRole.UserRole)[0]
|
||||
return match
|
||||
|
||||
def accept(self) -> None:
|
||||
|
||||
self.save_match()
|
||||
self.current_match_set_idx += 1
|
||||
|
||||
if self.current_match_set_idx == len(self.match_set_list):
|
||||
# no more items
|
||||
QtWidgets.QDialog.accept(self)
|
||||
else:
|
||||
self.update_data()
|
||||
|
||||
def skip_to_next(self) -> None:
|
||||
self.current_match_set_idx += 1
|
||||
|
||||
if self.current_match_set_idx == len(self.match_set_list):
|
||||
# no more items
|
||||
QtWidgets.QDialog.reject(self)
|
||||
else:
|
||||
self.update_data()
|
||||
|
||||
def reject(self) -> None:
|
||||
reply = QtWidgets.QMessageBox.question(
|
||||
self,
|
||||
"Cancel Matching",
|
||||
"Are you sure you wish to cancel the matching process?",
|
||||
QtWidgets.QMessageBox.StandardButton.Yes,
|
||||
QtWidgets.QMessageBox.StandardButton.No,
|
||||
)
|
||||
|
||||
if reply == QtWidgets.QMessageBox.StandardButton.No:
|
||||
return
|
||||
|
||||
QtWidgets.QDialog.reject(self)
|
||||
|
||||
def save_match(self) -> None:
|
||||
|
||||
match = self.current_match()
|
||||
ca = self.current_match_set.ca
|
||||
|
||||
md = ca.read_metadata(self._style)
|
||||
if md.is_empty:
|
||||
md = ca.metadata_from_filename(
|
||||
self.settings.complicated_parser,
|
||||
self.settings.remove_c2c,
|
||||
self.settings.remove_fcbd,
|
||||
self.settings.remove_publisher,
|
||||
)
|
||||
|
||||
# now get the particular issue data
|
||||
cv_md = self.fetch_func(match)
|
||||
if cv_md is None:
|
||||
QtWidgets.QMessageBox.critical(
|
||||
self, "Network Issue", "Could not connect to Comic Vine to get issue details!"
|
||||
)
|
||||
return
|
||||
|
||||
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
|
||||
md.overlay(cv_md)
|
||||
success = ca.write_metadata(md, self._style)
|
||||
ca.load_cache([MetaDataStyle.CBI, MetaDataStyle.CIX])
|
||||
|
||||
QtWidgets.QApplication.restoreOverrideCursor()
|
||||
|
||||
if not success:
|
||||
QtWidgets.QMessageBox.warning(self, "Write Error", "Saving the tags to the archive seemed to fail!")
|
||||
70
comictaggerlib/autotagprogresswindow.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""A PyQT4 dialog to show ID log and progress"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comictaggerlib.coverimagewidget import CoverImageWidget
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AutoTagProgressWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("autotagprogresswindow.ui"), self)
|
||||
|
||||
self.archiveCoverWidget = CoverImageWidget(self.archiveCoverContainer, CoverImageWidget.DataMode, False)
|
||||
gridlayout = QtWidgets.QGridLayout(self.archiveCoverContainer)
|
||||
gridlayout.addWidget(self.archiveCoverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
self.testCoverWidget = CoverImageWidget(self.testCoverContainer, CoverImageWidget.DataMode, False)
|
||||
gridlayout = QtWidgets.QGridLayout(self.testCoverContainer)
|
||||
gridlayout.addWidget(self.testCoverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
self.isdone = False
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
reduce_widget_font_size(self.textEdit)
|
||||
|
||||
def set_archive_image(self, img_data: bytes) -> None:
|
||||
self.set_cover_image(img_data, self.archiveCoverWidget)
|
||||
|
||||
def set_test_image(self, img_data: bytes) -> None:
|
||||
self.set_cover_image(img_data, self.testCoverWidget)
|
||||
|
||||
def set_cover_image(self, img_data: bytes, widget: CoverImageWidget) -> None:
|
||||
widget.set_image_data(img_data)
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
|
||||
def reject(self) -> None:
|
||||
QtWidgets.QDialog.reject(self)
|
||||
self.isdone = True
|
||||
109
comictaggerlib/autotagstartwindow.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""A PyQT4 dialog to confirm and set options for auto-tag"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AutoTagStartWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget, settings: ComicTaggerSettings, msg: str) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("autotagstartwindow.ui"), self)
|
||||
self.label.setText(msg)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(self.windowFlags() & ~QtCore.Qt.WindowType.WindowContextHelpButtonHint)
|
||||
)
|
||||
|
||||
self.settings = settings
|
||||
|
||||
self.cbxSpecifySearchString.setChecked(False)
|
||||
self.cbxSplitWords.setChecked(False)
|
||||
self.leNameLengthMatchTolerance.setText(str(self.settings.id_length_delta_thresh))
|
||||
self.leSearchString.setEnabled(False)
|
||||
|
||||
self.cbxSaveOnLowConfidence.setChecked(self.settings.save_on_low_confidence)
|
||||
self.cbxDontUseYear.setChecked(self.settings.dont_use_year_when_identifying)
|
||||
self.cbxAssumeIssueOne.setChecked(self.settings.assume_1_if_no_issue_num)
|
||||
self.cbxIgnoreLeadingDigitsInFilename.setChecked(self.settings.ignore_leading_numbers_in_filename)
|
||||
self.cbxRemoveAfterSuccess.setChecked(self.settings.remove_archive_after_successful_match)
|
||||
self.cbxWaitForRateLimit.setChecked(self.settings.wait_and_retry_on_rate_limit)
|
||||
self.cbxAutoImprint.setChecked(self.settings.auto_imprint)
|
||||
|
||||
nlmt_tip = """ <html>The <b>Name Length Match Tolerance</b> is for eliminating automatic
|
||||
search matches that are too long compared to your series name search. The higher
|
||||
it is, the more likely to have a good match, but each search will take longer and
|
||||
use more bandwidth. Too low, and only the very closest lexical matches will be
|
||||
explored.</html>"""
|
||||
|
||||
self.leNameLengthMatchTolerance.setToolTip(nlmt_tip)
|
||||
|
||||
ss_tip = """<html>
|
||||
The <b>series search string</b> specifies the search string to be used for all selected archives.
|
||||
Use this when trying to match archives with hard-to-parse or incorrect filenames. All archives selected
|
||||
should be from the same series.
|
||||
</html>"""
|
||||
self.leSearchString.setToolTip(ss_tip)
|
||||
self.cbxSpecifySearchString.setToolTip(ss_tip)
|
||||
|
||||
validator = QtGui.QIntValidator(0, 99, self)
|
||||
self.leNameLengthMatchTolerance.setValidator(validator)
|
||||
|
||||
self.cbxSpecifySearchString.stateChanged.connect(self.search_string_toggle)
|
||||
|
||||
self.auto_save_on_low = False
|
||||
self.dont_use_year = False
|
||||
self.assume_issue_one = False
|
||||
self.ignore_leading_digits_in_filename = False
|
||||
self.remove_after_success = False
|
||||
self.wait_and_retry_on_rate_limit = False
|
||||
self.search_string = ""
|
||||
self.name_length_match_tolerance = self.settings.id_length_delta_thresh
|
||||
self.split_words = self.cbxSplitWords.isChecked()
|
||||
|
||||
def search_string_toggle(self) -> None:
|
||||
enable = self.cbxSpecifySearchString.isChecked()
|
||||
self.leSearchString.setEnabled(enable)
|
||||
|
||||
def accept(self) -> None:
|
||||
QtWidgets.QDialog.accept(self)
|
||||
|
||||
self.auto_save_on_low = self.cbxSaveOnLowConfidence.isChecked()
|
||||
self.dont_use_year = self.cbxDontUseYear.isChecked()
|
||||
self.assume_issue_one = self.cbxAssumeIssueOne.isChecked()
|
||||
self.ignore_leading_digits_in_filename = self.cbxIgnoreLeadingDigitsInFilename.isChecked()
|
||||
self.remove_after_success = self.cbxRemoveAfterSuccess.isChecked()
|
||||
self.name_length_match_tolerance = int(self.leNameLengthMatchTolerance.text())
|
||||
self.wait_and_retry_on_rate_limit = self.cbxWaitForRateLimit.isChecked()
|
||||
self.split_words = self.cbxSplitWords.isChecked()
|
||||
|
||||
# persist some settings
|
||||
self.settings.save_on_low_confidence = self.auto_save_on_low
|
||||
self.settings.dont_use_year_when_identifying = self.dont_use_year
|
||||
self.settings.assume_1_if_no_issue_num = self.assume_issue_one
|
||||
self.settings.ignore_leading_numbers_in_filename = self.ignore_leading_digits_in_filename
|
||||
self.settings.remove_archive_after_successful_match = self.remove_after_success
|
||||
self.settings.wait_and_retry_on_rate_limit = self.wait_and_retry_on_rate_limit
|
||||
|
||||
if self.cbxSpecifySearchString.isChecked():
|
||||
self.search_string = self.leSearchString.text()
|
||||
100
comictaggerlib/cbltransformer.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""A class to manage modifying metadata specifically for CBL/CBI"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from comicapi.genericmetadata import CreditMetadata, GenericMetadata
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CBLTransformer:
|
||||
def __init__(self, metadata: GenericMetadata, settings: ComicTaggerSettings) -> None:
|
||||
self.metadata = metadata
|
||||
self.settings = settings
|
||||
|
||||
def apply(self) -> GenericMetadata:
|
||||
# helper funcs
|
||||
def append_to_tags_if_unique(item: str) -> None:
|
||||
if item.lower() not in (tag.lower() for tag in self.metadata.tags):
|
||||
self.metadata.tags.append(item)
|
||||
|
||||
def add_string_list_to_tags(str_list: Optional[str]) -> None:
|
||||
if str_list:
|
||||
items = [s.strip() for s in str_list.split(",")]
|
||||
for item in items:
|
||||
append_to_tags_if_unique(item)
|
||||
|
||||
if self.settings.assume_lone_credit_is_primary:
|
||||
|
||||
# helper
|
||||
def set_lone_primary(role_list: list[str]) -> tuple[Optional[CreditMetadata], int]:
|
||||
lone_credit: Optional[CreditMetadata] = None
|
||||
count = 0
|
||||
for c in self.metadata.credits:
|
||||
if c["role"].lower() in role_list:
|
||||
count += 1
|
||||
lone_credit = c
|
||||
if count > 1:
|
||||
lone_credit = None
|
||||
break
|
||||
if lone_credit is not None:
|
||||
lone_credit["primary"] = True
|
||||
return lone_credit, count
|
||||
|
||||
# need to loop three times, once for 'writer', 'artist', and then
|
||||
# 'penciler' if no artist
|
||||
set_lone_primary(["writer"])
|
||||
c, count = set_lone_primary(["artist"])
|
||||
if c is None and count == 0:
|
||||
c, count = set_lone_primary(["penciler", "penciller"])
|
||||
if c is not None:
|
||||
c["primary"] = False
|
||||
self.metadata.add_credit(c["person"], "Artist", True)
|
||||
|
||||
if self.settings.copy_characters_to_tags:
|
||||
add_string_list_to_tags(self.metadata.characters)
|
||||
|
||||
if self.settings.copy_teams_to_tags:
|
||||
add_string_list_to_tags(self.metadata.teams)
|
||||
|
||||
if self.settings.copy_locations_to_tags:
|
||||
add_string_list_to_tags(self.metadata.locations)
|
||||
|
||||
if self.settings.copy_storyarcs_to_tags:
|
||||
add_string_list_to_tags(self.metadata.story_arc)
|
||||
|
||||
if self.settings.copy_notes_to_comments:
|
||||
if self.metadata.notes is not None:
|
||||
if self.metadata.comments is None:
|
||||
self.metadata.comments = ""
|
||||
else:
|
||||
self.metadata.comments += "\n\n"
|
||||
if self.metadata.notes not in self.metadata.comments:
|
||||
self.metadata.comments += self.metadata.notes
|
||||
|
||||
if self.settings.copy_weblink_to_comments:
|
||||
if self.metadata.web_link is not None:
|
||||
if self.metadata.comments is None:
|
||||
self.metadata.comments = ""
|
||||
else:
|
||||
self.metadata.comments += "\n\n"
|
||||
if self.metadata.web_link not in self.metadata.comments:
|
||||
self.metadata.comments += self.metadata.web_link
|
||||
|
||||
return self.metadata
|
||||
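A hypothetical usage sketch for the transformer above (the SimpleNamespace stands in for a real ComicTaggerSettings object and only defines the flags that apply() reads; package paths assume this tree is importable):

```python
from types import SimpleNamespace

from comicapi.genericmetadata import GenericMetadata
from comictaggerlib.cbltransformer import CBLTransformer

# stand-in for ComicTaggerSettings with just the flags apply() reads
settings = SimpleNamespace(
    assume_lone_credit_is_primary=False,
    copy_characters_to_tags=True,
    copy_teams_to_tags=False,
    copy_locations_to_tags=False,
    copy_storyarcs_to_tags=False,
    copy_notes_to_comments=False,
    copy_weblink_to_comments=False,
)

md = GenericMetadata()
md.characters = "Alpha, Beta"

md = CBLTransformer(md, settings).apply()
# the character names are now tags, added once each
assert md.tags == ["Alpha", "Beta"]
```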
571
comictaggerlib/cli.py
Normal file
@@ -0,0 +1,571 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
"""ComicTagger CLI functions"""
|
||||
|
||||
# Copyright 2013 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pprint import pprint
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.comicarchive import ComicArchive, MetaDataStyle
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comictaggerlib.cbltransformer import CBLTransformer
|
||||
from comictaggerlib.comicvinetalker import ComicVineTalker, ComicVineTalkerException
|
||||
from comictaggerlib.filerenamer import FileRenamer
|
||||
from comictaggerlib.issueidentifier import IssueIdentifier
|
||||
from comictaggerlib.resulttypes import IssueResult, MultipleMatch, OnlineMatchResults
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def actual_issue_data_fetch(
|
||||
match: IssueResult, settings: ComicTaggerSettings, opts: argparse.Namespace
|
||||
) -> GenericMetadata:
|
||||
# now get the particular issue data
|
||||
try:
|
||||
comic_vine = ComicVineTalker()
|
||||
comic_vine.wait_for_rate_limit = opts.wait_on_cv_rate_limit
|
||||
cv_md = comic_vine.fetch_issue_data(match["volume_id"], match["issue_number"], settings)
|
||||
except ComicVineTalkerException:
|
||||
logger.exception("Network error while getting issue details. Save aborted")
|
||||
return GenericMetadata()
|
||||
|
||||
if settings.apply_cbl_transform_on_cv_import:
|
||||
cv_md = CBLTransformer(cv_md, settings).apply()
|
||||
|
||||
return cv_md
|
||||
|
||||
|
||||
def actual_metadata_save(ca: ComicArchive, opts: argparse.Namespace, md: GenericMetadata) -> bool:
|
||||
if not opts.dryrun:
|
||||
# write out the new data
|
||||
if not ca.write_metadata(md, opts.type if opts.type is not None else 0):
|
||||
logger.error("The tag save seemed to fail!")
|
||||
return False
|
||||
|
||||
print("Save complete.")
|
||||
logger.info("Save complete.")
|
||||
else:
|
||||
if opts.terse:
|
||||
logger.info("dry-run option was set, so nothing was written")
|
||||
print("dry-run option was set, so nothing was written")
|
||||
else:
|
||||
logger.info("dry-run option was set, so nothing was written, but here is the final set of tags:")
|
||||
print("dry-run option was set, so nothing was written, but here is the final set of tags:")
|
||||
print(f"{md}")
|
||||
return True
|
||||
|
||||
|
||||
def display_match_set_for_choice(
|
||||
label: str, match_set: MultipleMatch, opts: argparse.Namespace, settings: ComicTaggerSettings
|
||||
) -> None:
|
||||
print(f"{match_set.ca.path} -- {label}:")
|
||||
|
||||
# sort match list by year
|
||||
match_set.matches.sort(key=lambda k: k["year"] or 0)
|
||||
|
||||
for (counter, m) in enumerate(match_set.matches):
|
||||
counter += 1
|
||||
print(
|
||||
" {}. {} #{} [{}] ({}/{}) - {}".format(
|
||||
counter,
|
||||
m["series"],
|
||||
m["issue_number"],
|
||||
m["publisher"],
|
||||
m["month"],
|
||||
m["year"],
|
||||
m["issue_title"],
|
||||
)
|
||||
)
|
||||
if opts.interactive:
|
||||
while True:
|
||||
i = input("Choose a match #, or 's' to skip: ")
|
||||
if (i.isdigit() and int(i) in range(1, len(match_set.matches) + 1)) or i == "s":
|
||||
break
|
||||
if i != "s":
|
||||
# save the data!
|
||||
# we know at this point, that the file is all good to go
|
||||
ca = match_set.ca
|
||||
md = create_local_metadata(opts, ca, ca.has_metadata(opts.type if opts.type is not None else 0), settings)
|
||||
cv_md = actual_issue_data_fetch(match_set.matches[int(i) - 1], settings, opts)
|
||||
if opts.overwrite:
|
||||
md = cv_md
|
||||
else:
|
||||
md.overlay(cv_md)
|
||||
|
||||
if opts.auto_imprint:
|
||||
md.fix_publisher()
|
||||
|
||||
actual_metadata_save(ca, opts, md)
|
||||
|
||||
|
||||
def post_process_matches(
|
||||
match_results: OnlineMatchResults, opts: argparse.Namespace, settings: ComicTaggerSettings
|
||||
) -> None:
|
||||
# now go through the match results
|
||||
if opts.show_save_summary:
|
||||
if len(match_results.good_matches) > 0:
|
||||
print("\nSuccessful matches:\n------------------")
|
||||
for f in match_results.good_matches:
|
||||
print(f)
|
||||
|
||||
if len(match_results.no_matches) > 0:
|
||||
print("\nNo matches:\n------------------")
|
||||
for f in match_results.no_matches:
|
||||
print(f)
|
||||
|
||||
if len(match_results.write_failures) > 0:
|
||||
print("\nFile Write Failures:\n------------------")
|
||||
for f in match_results.write_failures:
|
||||
print(f)
|
||||
|
||||
if len(match_results.fetch_data_failures) > 0:
|
||||
print("\nNetwork Data Fetch Failures:\n------------------")
|
||||
for f in match_results.fetch_data_failures:
|
||||
print(f)
|
||||
|
||||
if not opts.show_save_summary and not opts.interactive:
|
||||
# just quit if we're not interactive or showing the summary
|
||||
return
|
||||
|
||||
if len(match_results.multiple_matches) > 0:
|
||||
print("\nArchives with multiple high-confidence matches:\n------------------")
|
||||
for match_set in match_results.multiple_matches:
|
||||
display_match_set_for_choice("Multiple high-confidence matches", match_set, opts, settings)
|
||||
|
||||
if len(match_results.low_confidence_matches) > 0:
|
||||
print("\nArchives with low-confidence matches:\n------------------")
|
||||
for match_set in match_results.low_confidence_matches:
|
||||
if len(match_set.matches) == 1:
|
||||
label = "Single low-confidence match"
|
||||
else:
|
||||
label = "Multiple low-confidence matches"
|
||||
|
||||
display_match_set_for_choice(label, match_set, opts, settings)
|
||||
|
||||
|
||||
def cli_mode(opts: argparse.Namespace, settings: ComicTaggerSettings) -> None:
|
||||
if len(opts.files) < 1:
|
||||
logger.error("You must specify at least one filename. Use the -h option for more info")
|
||||
return
|
||||
|
||||
match_results = OnlineMatchResults()
|
||||
|
||||
for f in opts.files:
|
||||
process_file_cli(f, opts, settings, match_results)
|
||||
sys.stdout.flush()
|
||||
|
||||
post_process_matches(match_results, opts, settings)
|
||||
|
||||
|
||||
def create_local_metadata(
|
||||
opts: argparse.Namespace, ca: ComicArchive, has_desired_tags: bool, settings: ComicTaggerSettings
|
||||
) -> GenericMetadata:
|
||||
md = GenericMetadata()
|
||||
md.set_default_page_list(ca.get_number_of_pages())
|
||||
|
||||
# now, overlay the parsed filename info
|
||||
if opts.parse_filename:
|
||||
f_md = ca.metadata_from_filename(
|
||||
settings.complicated_parser,
|
||||
settings.remove_c2c,
|
||||
settings.remove_fcbd,
|
||||
settings.remove_publisher,
|
||||
opts.split_words,
|
||||
)
|
||||
if opts.overwrite:
|
||||
md = f_md
|
||||
else:
|
||||
md.overlay(f_md)
|
||||
|
||||
if has_desired_tags:
|
||||
t_md = ca.read_metadata(opts.type if opts.type is not None else 0)
|
||||
md.overlay(t_md)
|
||||
|
||||
# finally, use explicit stuff
|
||||
if opts.overwrite and not opts.metadata.is_empty:
|
||||
md = opts.metadata
|
||||
else:
|
||||
md.overlay(opts.metadata)
|
||||
|
||||
return md
|
||||
|
||||
|
||||
def process_file_cli(
|
||||
filename: str, opts: argparse.Namespace, settings: ComicTaggerSettings, match_results: OnlineMatchResults
|
||||
) -> None:
|
||||
batch_mode = len(opts.files) > 1
|
||||
|
||||
ca = ComicArchive(filename, settings.rar_exe_path, ComicTaggerSettings.get_graphic("nocover.png"))
|
||||
|
||||
if not os.path.lexists(filename):
|
||||
logger.error("Cannot find %s", filename)
|
||||
return
|
||||
|
||||
if not ca.seems_to_be_a_comic_archive():
|
||||
logger.error("Sorry, but %s is not a comic archive!", filename)
|
||||
return
|
||||
|
||||
if not ca.is_writable() and (opts.delete or opts.copy or opts.save or opts.rename):
|
||||
logger.error("This archive is not writable for that tag type")
|
||||
return
|
||||
|
||||
has = [False, False, False]
|
||||
if ca.has_cix():
|
||||
has[MetaDataStyle.CIX] = True
|
||||
if ca.has_cbi():
|
||||
has[MetaDataStyle.CBI] = True
|
||||
if ca.has_comet():
|
||||
has[MetaDataStyle.COMET] = True
|
||||
|
||||
if opts.print:
|
||||
|
||||
if opts.type is None:
|
||||
page_count = ca.get_number_of_pages()
|
||||
|
||||
brief = ""
|
||||
|
||||
if batch_mode:
|
||||
brief = f"{ca.path}: "
|
||||
|
||||
if ca.is_sevenzip():
|
||||
brief += "7Z archive "
|
||||
elif ca.is_zip():
|
||||
brief += "ZIP archive "
|
||||
elif ca.is_rar():
|
||||
brief += "RAR archive "
|
||||
elif ca.is_folder():
|
||||
brief += "Folder archive "
|
||||
|
||||
brief += f"({page_count: >3} pages)"
|
||||
brief += " tags:[ "
|
||||
|
||||
if not (has[MetaDataStyle.CBI] or has[MetaDataStyle.CIX] or has[MetaDataStyle.COMET]):
|
||||
brief += "none "
|
||||
else:
|
||||
if has[MetaDataStyle.CBI]:
|
||||
brief += "CBL "
|
||||
if has[MetaDataStyle.CIX]:
|
||||
brief += "CR "
|
||||
if has[MetaDataStyle.COMET]:
|
||||
brief += "CoMet "
|
||||
brief += "]"
|
||||
|
||||
print(brief)
|
||||
|
||||
if opts.terse:
|
||||
return
|
||||
|
||||
print()
|
||||
|
||||
if opts.type is None or opts.type == MetaDataStyle.CIX:
|
||||
if has[MetaDataStyle.CIX]:
|
||||
print("--------- ComicRack tags ---------")
|
||||
if opts.raw:
|
||||
print(ca.read_raw_cix())
|
||||
else:
|
||||
print(ca.read_cix())
|
||||
|
||||
if opts.type is None or opts.type == MetaDataStyle.CBI:
|
||||
if has[MetaDataStyle.CBI]:
|
||||
print("------- ComicBookLover tags -------")
|
||||
if opts.raw:
|
||||
pprint(json.loads(ca.read_raw_cbi()))
|
||||
else:
|
||||
print(ca.read_cbi())
|
||||
|
||||
if opts.type is None or opts.type == MetaDataStyle.COMET:
|
||||
if has[MetaDataStyle.COMET]:
|
||||
print("----------- CoMet tags -----------")
|
||||
if opts.raw:
|
||||
print(ca.read_raw_comet())
|
||||
else:
|
||||
print(ca.read_comet())
|
||||
|
||||
elif opts.delete:
|
||||
style_name = MetaDataStyle.name[opts.type]
|
||||
if has[opts.type]:
|
||||
if not opts.dryrun:
|
||||
if not ca.remove_metadata(opts.type):
|
||||
print(f"{filename}: Tag removal seemed to fail!")
|
||||
else:
|
||||
print(f"{filename}: Removed {style_name} tags.")
|
||||
else:
|
||||
print(f"{filename}: dry-run. {style_name} tags not removed")
|
||||
else:
|
||||
print(f"{filename}: This archive doesn't have {style_name} tags to remove.")
|
||||
|
||||
elif opts.copy:
|
||||
dst_style_name = MetaDataStyle.name[opts.type]
|
||||
if opts.no_overwrite and has[opts.type]:
|
||||
print(f"{filename}: Already has {dst_style_name} tags. Not overwriting.")
|
||||
return
|
||||
if opts.copy == opts.type:
|
||||
print(f"{filename}: Destination and source are same: {dst_style_name}. Nothing to do.")
|
||||
return
|
||||
|
||||
src_style_name = MetaDataStyle.name[opts.copy]
|
||||
if has[opts.copy]:
|
||||
if not opts.dryrun:
|
||||
md = ca.read_metadata(opts.copy)
|
||||
|
||||
if settings.apply_cbl_transform_on_bulk_operation and opts.type == MetaDataStyle.CBI:
|
||||
md = CBLTransformer(md, settings).apply()
|
||||
|
||||
if not ca.write_metadata(md, opts.type):
|
||||
print(f"{filename}: Tag copy seemed to fail!")
|
||||
else:
|
||||
print(f"{filename}: Copied {src_style_name} tags to {dst_style_name}.")
|
||||
else:
|
||||
print(f"{filename}: dry-run. {src_style_name} tags not copied")
|
||||
else:
|
||||
print(f"{filename}: This archive doesn't have {src_style_name} tags to copy.")
|
||||
|
||||
elif opts.save:
|
||||
|
||||
if opts.no_overwrite and has[opts.type]:
|
||||
print(f"{filename}: Already has {MetaDataStyle.name[opts.type]} tags. Not overwriting.")
|
||||
return
|
||||
|
||||
if batch_mode:
|
||||
print(f"Processing {ca.path}...")
|
||||
|
||||
md = create_local_metadata(opts, ca, has[opts.type], settings)
|
||||
if md.issue is None or md.issue == "":
|
||||
if opts.assume_issue_one:
|
||||
md.issue = "1"
|
||||
|
||||
# now, search online
|
||||
if opts.online:
|
||||
if opts.id is not None:
|
||||
# we were given the actual ID to search with
|
||||
try:
|
||||
comic_vine = ComicVineTalker()
|
||||
comic_vine.wait_for_rate_limit = opts.wait_on_cv_rate_limit
|
||||
cv_md = comic_vine.fetch_issue_data_by_issue_id(opts.id, settings)
|
||||
except ComicVineTalkerException:
|
||||
logger.exception("Network error while getting issue details. Save aborted")
|
||||
match_results.fetch_data_failures.append(str(ca.path.absolute()))
|
||||
return
|
||||
|
||||
if cv_md is None:
|
||||
logger.error("No match for ID %s was found.", opts.id)
|
||||
match_results.no_matches.append(str(ca.path.absolute()))
|
||||
return
|
||||
|
||||
if settings.apply_cbl_transform_on_cv_import:
|
||||
cv_md = CBLTransformer(cv_md, settings).apply()
|
||||
else:
|
||||
ii = IssueIdentifier(ca, settings)
|
||||
|
||||
if md is None or md.is_empty:
|
||||
logger.error("No metadata given to search online with!")
|
||||
match_results.no_matches.append(str(ca.path.absolute()))
|
||||
return
|
||||
|
||||
def myoutput(text: str) -> None:
|
||||
if opts.verbose:
|
||||
IssueIdentifier.default_write_output(text)
|
||||
|
||||
# use our overlayed MD struct to search
|
||||
ii.set_additional_metadata(md)
|
||||
ii.only_use_additional_meta_data = True
|
||||
ii.wait_and_retry_on_rate_limit = opts.wait_on_cv_rate_limit
|
||||
ii.set_output_function(myoutput)
|
||||
ii.cover_page_index = md.get_cover_page_index_list()[0]
|
||||
matches = ii.search()
|
||||
|
||||
result = ii.search_result
|
||||
|
||||
found_match = False
|
||||
choices = False
|
||||
low_confidence = False
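                # Translate the IssueIdentifier result code into the three flags
                # that drive the abort/save decisions below.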
|
||||
|
||||
if result == ii.result_no_matches:
|
||||
pass
|
||||
elif result == ii.result_found_match_but_bad_cover_score:
|
||||
low_confidence = True
|
||||
found_match = True
|
||||
elif result == ii.result_found_match_but_not_first_page:
|
||||
found_match = True
|
||||
elif result == ii.result_multiple_matches_with_bad_image_scores:
|
||||
low_confidence = True
|
||||
choices = True
|
||||
elif result == ii.result_one_good_match:
|
||||
found_match = True
|
||||
elif result == ii.result_multiple_good_matches:
|
||||
choices = True
|
||||
|
||||
if choices:
|
||||
if low_confidence:
|
||||
logger.error("Online search: Multiple low confidence matches. Save aborted")
|
||||
match_results.low_confidence_matches.append(MultipleMatch(ca, matches))
|
||||
return
|
||||
|
||||
logger.error("Online search: Multiple good matches. Save aborted")
|
||||
match_results.multiple_matches.append(MultipleMatch(ca, matches))
|
||||
return
|
||||
if low_confidence and opts.abort_on_low_confidence:
|
||||
logger.error("Online search: Low confidence match. Save aborted")
|
||||
match_results.low_confidence_matches.append(MultipleMatch(ca, matches))
|
||||
return
|
||||
if not found_match:
|
||||
logger.error("Online search: No match found. Save aborted")
|
||||
match_results.no_matches.append(str(ca.path.absolute()))
|
||||
return
|
||||
|
||||
# we got here, so we have a single match
|
||||
|
||||
# now get the particular issue data
|
||||
cv_md = actual_issue_data_fetch(matches[0], settings, opts)
|
||||
                if cv_md.is_empty:
                    match_results.fetch_data_failures.append(str(ca.path.absolute()))
                    return
|
||||
|
||||
if opts.overwrite:
|
||||
md = cv_md
|
||||
else:
|
||||
md.overlay(cv_md)
|
||||
|
||||
if opts.auto_imprint:
|
||||
md.fix_publisher()
|
||||
|
||||
# ok, done building our metadata. time to save
|
||||
if not actual_metadata_save(ca, opts, md):
|
||||
match_results.write_failures.append(str(ca.path.absolute()))
|
||||
else:
|
||||
match_results.good_matches.append(str(ca.path.absolute()))
|
||||
|
||||
elif opts.rename:
|
||||
|
||||
msg_hdr = ""
|
||||
if batch_mode:
|
||||
msg_hdr = f"{ca.path}: "
|
||||
|
||||
if opts.type is not None:
|
||||
use_tags = has[opts.type]
|
||||
else:
|
||||
use_tags = False
|
||||
|
||||
md = create_local_metadata(opts, ca, use_tags, settings)
|
||||
|
||||
if md.series is None:
|
||||
logger.error(msg_hdr + "Can't rename without series name")
|
||||
return
|
||||
|
||||
new_ext = "" # default
|
||||
if settings.rename_extension_based_on_archive:
|
||||
if ca.is_sevenzip():
|
||||
new_ext = ".cb7"
|
||||
elif ca.is_zip():
|
||||
new_ext = ".cbz"
|
||||
elif ca.is_rar():
|
||||
new_ext = ".cbr"
|
||||
|
||||
renamer = FileRenamer(md, platform="universal" if settings.rename_strict else "auto")
|
||||
renamer.set_template(settings.rename_template)
|
||||
renamer.set_issue_zero_padding(settings.rename_issue_number_padding)
|
||||
renamer.set_smart_cleanup(settings.rename_use_smart_string_cleanup)
|
||||
renamer.move = settings.rename_move_dir
|
||||
|
||||
try:
|
||||
new_name = renamer.determine_name(ext=new_ext)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
msg_hdr + "Invalid format string!\n"
|
||||
"Your rename template is invalid!\n\n"
|
||||
"Please consult the template help in the settings "
|
||||
"and the documentation on the format at "
|
||||
"https://docs.python.org/3/library/string.html#format-string-syntax"
|
||||
)
|
||||
return
|
||||
|
||||
folder = os.path.dirname(os.path.abspath(filename))
|
||||
if settings.rename_move_dir and len(settings.rename_dir.strip()) > 3:
|
||||
folder = settings.rename_dir.strip()
|
||||
|
||||
new_abs_path = utils.unique_file(os.path.join(folder, new_name))
|
||||
|
||||
if os.path.join(folder, new_name) == os.path.abspath(filename):
|
||||
print(msg_hdr + "Filename is already good!", file=sys.stderr)
|
||||
return
|
||||
|
||||
suffix = ""
|
||||
if not opts.dryrun:
|
||||
# rename the file
|
||||
os.makedirs(os.path.dirname(new_abs_path), 0o777, True)
|
||||
os.rename(filename, new_abs_path)
|
||||
else:
|
||||
suffix = " (dry-run, no change)"
|
||||
|
||||
print(f"renamed '{os.path.basename(ca.path)}' -> '{new_name}' {suffix}")
|
||||
|
||||
elif opts.export_to_zip:
|
||||
msg_hdr = ""
|
||||
if batch_mode:
|
||||
msg_hdr = f"{ca.path}: "
|
||||
|
||||
if not ca.is_rar():
|
||||
logger.error(msg_hdr + "Archive is not a RAR.")
|
||||
return
|
||||
|
||||
        rar_file = os.path.abspath(filename)
|
||||
new_file = os.path.splitext(rar_file)[0] + ".cbz"
|
||||
|
||||
if opts.abort_on_conflict and os.path.lexists(new_file):
|
||||
print(msg_hdr + f"{os.path.split(new_file)[1]} already exists in the that folder.")
|
||||
return
|
||||
|
||||
        new_file = utils.unique_file(new_file)
|
||||
|
||||
delete_success = False
|
||||
export_success = False
|
||||
if not opts.dryrun:
|
||||
if ca.export_as_zip(new_file):
|
||||
export_success = True
|
||||
if opts.delete_after_zip_export:
|
||||
try:
|
||||
os.unlink(rar_file)
|
||||
except:
|
||||
logger.exception(msg_hdr + "Error deleting original RAR after export")
|
||||
delete_success = False
|
||||
else:
|
||||
delete_success = True
|
||||
else:
|
||||
# last export failed, so remove the zip, if it exists
|
||||
if os.path.lexists(new_file):
|
||||
os.remove(new_file)
|
||||
else:
|
||||
msg = msg_hdr + f"Dry-run: Would try to create {os.path.split(new_file)[1]}"
|
||||
if opts.delete_after_zip_export:
|
||||
msg += " and delete orginal."
|
||||
print(msg)
|
||||
return
|
||||
|
||||
msg = msg_hdr
|
||||
if export_success:
|
||||
msg += f"Archive exported successfully to: {os.path.split(new_file)[1]}"
|
||||
if opts.delete_after_zip_export and delete_success:
|
||||
msg += " (Original deleted) "
|
||||
else:
|
||||
msg += "Archive failed to export!"
|
||||
|
||||
print(msg)
|
||||
comictaggerlib/comicvinecacher.py (new file)
@@ -0,0 +1,441 @@
|
||||
"""A python class to manage caching of data from Comic Vine"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import sqlite3 as lite
|
||||
from typing import Any, Optional
|
||||
|
||||
from comicapi import utils
|
||||
from comictaggerlib import ctversion
|
||||
from comictaggerlib.resulttypes import CVIssuesResults, CVVolumeResults, SelectDetails
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ComicVineCacher:
|
||||
def __init__(self) -> None:
|
||||
self.settings_folder = ComicTaggerSettings.get_settings_folder()
|
||||
self.db_file = os.path.join(self.settings_folder, "cv_cache.db")
|
||||
self.version_file = os.path.join(self.settings_folder, "cache_version.txt")
|
||||
|
||||
# verify that cache is from same version as this one
|
||||
data = ""
|
||||
try:
|
||||
with open(self.version_file, "rb") as f:
|
||||
data = f.read().decode("utf-8")
|
||||
|
||||
except:
|
||||
pass
|
||||
if data != ctversion.version:
|
||||
self.clear_cache()
|
||||
|
||||
if not os.path.exists(self.db_file):
|
||||
self.create_cache_db()
|
||||
|
||||
def clear_cache(self) -> None:
|
||||
try:
|
||||
os.unlink(self.db_file)
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
os.unlink(self.version_file)
|
||||
except:
|
||||
pass
|
||||
|
||||
def create_cache_db(self) -> None:
|
||||
|
||||
# create the version file
|
||||
with open(self.version_file, "w", encoding="utf-8") as f:
|
||||
f.write(ctversion.version)
|
||||
|
||||
# this will wipe out any existing version
|
||||
open(self.db_file, "wb").close()
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
# create tables
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
# name,id,start_year,publisher,image,description,count_of_issues
|
||||
cur.execute(
|
||||
"CREATE TABLE VolumeSearchCache("
|
||||
+ "search_term TEXT,"
|
||||
+ "id INT,"
|
||||
+ "name TEXT,"
|
||||
+ "start_year INT,"
|
||||
+ "publisher TEXT,"
|
||||
+ "count_of_issues INT,"
|
||||
+ "image_url TEXT,"
|
||||
+ "description TEXT,"
|
||||
+ "timestamp DATE DEFAULT (datetime('now','localtime'))) "
|
||||
)
|
||||
|
||||
cur.execute(
|
||||
"CREATE TABLE Volumes("
|
||||
+ "id INT,"
|
||||
+ "name TEXT,"
|
||||
+ "publisher TEXT,"
|
||||
+ "count_of_issues INT,"
|
||||
+ "start_year INT,"
|
||||
+ "timestamp DATE DEFAULT (datetime('now','localtime')), "
|
||||
+ "PRIMARY KEY (id))"
|
||||
)
|
||||
|
||||
cur.execute(
|
||||
"CREATE TABLE AltCovers("
|
||||
+ "issue_id INT,"
|
||||
+ "url_list TEXT,"
|
||||
+ "timestamp DATE DEFAULT (datetime('now','localtime')), "
|
||||
+ "PRIMARY KEY (issue_id))"
|
||||
)
|
||||
|
||||
cur.execute(
|
||||
"CREATE TABLE Issues("
|
||||
+ "id INT,"
|
||||
+ "volume_id INT,"
|
||||
+ "name TEXT,"
|
||||
+ "issue_number TEXT,"
|
||||
+ "super_url TEXT,"
|
||||
+ "thumb_url TEXT,"
|
||||
+ "cover_date TEXT,"
|
||||
+ "site_detail_url TEXT,"
|
||||
+ "description TEXT,"
|
||||
+ "timestamp DATE DEFAULT (datetime('now','localtime')), "
|
||||
+ "PRIMARY KEY (id))"
|
||||
)
|
||||
|
||||
def add_search_results(self, search_term: str, cv_search_results: list[CVVolumeResults]) -> None:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
with con:
|
||||
con.text_factory = str
|
||||
cur = con.cursor()
|
||||
|
||||
# remove all previous entries with this search term
|
||||
cur.execute("DELETE FROM VolumeSearchCache WHERE search_term = ?", [search_term.lower()])
|
||||
|
||||
# now add in new results
|
||||
for record in cv_search_results:
|
||||
|
||||
if record["publisher"] is None:
|
||||
pub_name = ""
|
||||
else:
|
||||
pub_name = record["publisher"]["name"]
|
||||
|
||||
if record["image"] is None:
|
||||
url = ""
|
||||
else:
|
||||
url = record["image"]["super_url"]
|
||||
|
||||
cur.execute(
|
||||
"INSERT INTO VolumeSearchCache "
|
||||
+ "(search_term, id, name, start_year, publisher, count_of_issues, image_url, description) "
|
||||
+ "VALUES(?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
search_term.lower(),
|
||||
record["id"],
|
||||
record["name"],
|
||||
record["start_year"],
|
||||
pub_name,
|
||||
record["count_of_issues"],
|
||||
url,
|
||||
record["description"],
|
||||
),
|
||||
)
|
||||
|
||||
def get_search_results(self, search_term: str) -> list[CVVolumeResults]:
|
||||
|
||||
results = []
|
||||
con = lite.connect(self.db_file)
|
||||
with con:
|
||||
con.text_factory = str
|
||||
cur = con.cursor()
|
||||
|
||||
# purge stale search results
|
||||
a_day_ago = datetime.datetime.today() - datetime.timedelta(days=1)
|
||||
cur.execute("DELETE FROM VolumeSearchCache WHERE timestamp < ?", [str(a_day_ago)])
|
||||
|
||||
# fetch
|
||||
cur.execute("SELECT * FROM VolumeSearchCache WHERE search_term=?", [search_term.lower()])
|
||||
rows = cur.fetchall()
|
||||
# now process the results
|
||||
for record in rows:
|
||||
result = CVVolumeResults(
|
||||
{
|
||||
"id": record[1],
|
||||
"name": record[2],
|
||||
"start_year": record[3],
|
||||
"count_of_issues": record[5],
|
||||
"description": record[7],
|
||||
"publisher": {"name": record[4]},
|
||||
"image": {"super_url": record[6]},
|
||||
}
|
||||
)
|
||||
|
||||
results.append(result)
|
||||
|
||||
return results
|
||||
|
||||
def add_alt_covers(self, issue_id: int, url_list: list[str]) -> None:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
with con:
|
||||
con.text_factory = str
|
||||
cur = con.cursor()
|
||||
|
||||
# remove all previous entries with this search term
|
||||
cur.execute("DELETE FROM AltCovers WHERE issue_id = ?", [issue_id])
|
||||
|
||||
url_list_str = utils.list_to_string(url_list)
|
||||
# now add in new record
|
||||
cur.execute("INSERT INTO AltCovers (issue_id, url_list) VALUES(?, ?)", (issue_id, url_list_str))
|
||||
|
||||
def get_alt_covers(self, issue_id: int) -> list[str]:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
con.text_factory = str
|
||||
|
||||
# purge stale issue info - probably issue data won't change
|
||||
# much....
|
||||
a_month_ago = datetime.datetime.today() - datetime.timedelta(days=30)
|
||||
cur.execute("DELETE FROM AltCovers WHERE timestamp < ?", [str(a_month_ago)])
|
||||
|
||||
cur.execute("SELECT url_list FROM AltCovers WHERE issue_id=?", [issue_id])
|
||||
row = cur.fetchone()
|
||||
if row is None:
|
||||
return []
|
||||
|
||||
url_list_str = row[0]
|
||||
if len(url_list_str) == 0:
|
||||
return []
|
||||
raw_list = url_list_str.split(",")
|
||||
url_list = []
|
||||
for item in raw_list:
|
||||
url_list.append(str(item).strip())
|
||||
return url_list
|
||||
|
||||
def add_volume_info(self, cv_volume_record: CVVolumeResults) -> None:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
with con:
|
||||
|
||||
cur = con.cursor()
|
||||
|
||||
timestamp = datetime.datetime.now()
|
||||
|
||||
if cv_volume_record["publisher"] is None:
|
||||
pub_name = ""
|
||||
else:
|
||||
pub_name = cv_volume_record["publisher"]["name"]
|
||||
|
||||
data = {
|
||||
"name": cv_volume_record["name"],
|
||||
"publisher": pub_name,
|
||||
"count_of_issues": cv_volume_record["count_of_issues"],
|
||||
"start_year": cv_volume_record["start_year"],
|
||||
"timestamp": timestamp,
|
||||
}
|
||||
self.upsert(cur, "volumes", "id", cv_volume_record["id"], data)
|
||||
|
||||
def add_volume_issues_info(self, volume_id: int, cv_volume_issues: list[CVIssuesResults]) -> None:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
|
||||
timestamp = datetime.datetime.now()
|
||||
|
||||
# add in issues
|
||||
|
||||
for issue in cv_volume_issues:
|
||||
data = {
|
||||
"volume_id": volume_id,
|
||||
"name": issue["name"],
|
||||
"issue_number": issue["issue_number"],
|
||||
"site_detail_url": issue["site_detail_url"],
|
||||
"cover_date": issue["cover_date"],
|
||||
"super_url": issue["image"]["super_url"],
|
||||
"thumb_url": issue["image"]["thumb_url"],
|
||||
"description": issue["description"],
|
||||
"timestamp": timestamp,
|
||||
}
|
||||
self.upsert(cur, "issues", "id", issue["id"], data)
|
||||
|
||||
def get_volume_info(self, volume_id: int) -> Optional[CVVolumeResults]:
|
||||
|
||||
result: Optional[CVVolumeResults] = None
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
con.text_factory = str
|
||||
|
||||
# purge stale volume info
|
||||
a_week_ago = datetime.datetime.today() - datetime.timedelta(days=7)
|
||||
cur.execute("DELETE FROM Volumes WHERE timestamp < ?", [str(a_week_ago)])
|
||||
|
||||
# fetch
|
||||
cur.execute("SELECT id,name,publisher,count_of_issues,start_year FROM Volumes WHERE id = ?", [volume_id])
|
||||
|
||||
row = cur.fetchone()
|
||||
|
||||
if row is None:
|
||||
return result
|
||||
|
||||
# since ID is primary key, there is only one row
|
||||
result = CVVolumeResults(
|
||||
{
|
||||
"id": row[0],
|
||||
"name": row[1],
|
||||
"count_of_issues": row[3],
|
||||
"start_year": row[4],
|
||||
"publisher": {"name": row[2]},
|
||||
}
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
def get_volume_issues_info(self, volume_id: int) -> list[CVIssuesResults]:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
con.text_factory = str
|
||||
|
||||
# purge stale issue info - probably issue data won't change
|
||||
# much....
|
||||
a_week_ago = datetime.datetime.today() - datetime.timedelta(days=7)
|
||||
cur.execute("DELETE FROM Issues WHERE timestamp < ?", [str(a_week_ago)])
|
||||
|
||||
# fetch
|
||||
results: list[CVIssuesResults] = []
|
||||
|
||||
cur.execute(
|
||||
"SELECT id,name,issue_number,site_detail_url,cover_date,super_url,thumb_url,description FROM Issues WHERE volume_id = ?",
|
||||
[volume_id],
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
|
||||
# now process the results
|
||||
for row in rows:
|
||||
record = CVIssuesResults(
|
||||
{
|
||||
"id": row[0],
|
||||
"name": row[1],
|
||||
"issue_number": row[2],
|
||||
"site_detail_url": row[3],
|
||||
"cover_date": row[4],
|
||||
"image": {"super_url": row[5], "thumb_url": row[6]},
|
||||
"description": row[7],
|
||||
}
|
||||
)
|
||||
|
||||
results.append(record)
|
||||
|
||||
return results
|
||||
|
||||
def add_issue_select_details(
|
||||
self, issue_id: int, image_url: str, thumb_image_url: str, cover_date: str, site_detail_url: str
|
||||
) -> None:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
con.text_factory = str
|
||||
timestamp = datetime.datetime.now()
|
||||
|
||||
data = {
|
||||
"super_url": image_url,
|
||||
"thumb_url": thumb_image_url,
|
||||
"cover_date": cover_date,
|
||||
"site_detail_url": site_detail_url,
|
||||
"timestamp": timestamp,
|
||||
}
|
||||
self.upsert(cur, "issues", "id", issue_id, data)
|
||||
|
||||
def get_issue_select_details(self, issue_id: int) -> SelectDetails:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
con.text_factory = str
|
||||
|
||||
cur.execute("SELECT super_url,thumb_url,cover_date,site_detail_url FROM Issues WHERE id=?", [issue_id])
|
||||
row = cur.fetchone()
|
||||
|
||||
details = SelectDetails(
|
||||
{
|
||||
"image_url": None,
|
||||
"thumb_image_url": None,
|
||||
"cover_date": None,
|
||||
"site_detail_url": None,
|
||||
}
|
||||
)
|
||||
if row is not None and row[0] is not None:
|
||||
details["image_url"] = row[0]
|
||||
details["thumb_image_url"] = row[1]
|
||||
details["cover_date"] = row[2]
|
||||
details["site_detail_url"] = row[3]
|
||||
|
||||
return details
|
||||
|
||||
def upsert(self, cur: lite.Cursor, tablename: str, pkname: str, pkval: Any, data: dict[str, Any]) -> None:
|
||||
"""This does an insert if the given PK doesn't exist, and an
|
||||
        update if it does
|
||||
|
||||
TODO: look into checking if UPDATE is needed
|
||||
TODO: should the cursor be created here, and not up the stack?
|
||||
"""
|
||||
|
||||
keys = ""
|
||||
vals = []
|
||||
ins_slots = ""
|
||||
set_slots = ""
|
||||
|
||||
for key in data:
|
||||
|
||||
if keys != "":
|
||||
keys += ", "
|
||||
if ins_slots != "":
|
||||
ins_slots += ", "
|
||||
if set_slots != "":
|
||||
set_slots += ", "
|
||||
|
||||
keys += key
|
||||
vals.append(data[key])
|
||||
ins_slots += "?"
|
||||
set_slots += key + " = ?"
|
||||
|
||||
keys += ", " + pkname
|
||||
vals.append(pkval)
|
||||
ins_slots += ", ?"
|
||||
condition = pkname + " = ?"
|
||||
|
||||
sql_ins = f"INSERT OR IGNORE INTO {tablename} ({keys}) VALUES ({ins_slots})"
|
||||
cur.execute(sql_ins, vals)
|
||||
|
||||
sql_upd = f"UPDATE {tablename} SET {set_slots} WHERE {condition}"
|
||||
cur.execute(sql_upd, vals)
|
||||
comictaggerlib/comicvinetalker.py (new file)
@@ -0,0 +1,794 @@
|
||||
"""A python class to manage communication with Comic Vine's REST API"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from datetime import datetime
|
||||
from typing import Any, Callable, Optional, Union, cast
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comicapi.issuestring import IssueString
|
||||
from comictaggerlib import ctversion
|
||||
from comictaggerlib.comicvinecacher import ComicVineCacher
|
||||
from comictaggerlib.resulttypes import CVIssueDetailResults, CVIssuesResults, CVResult, CVVolumeResults, SelectDetails
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from PyQt5 import QtCore, QtNetwork
|
||||
|
||||
qt_available = True
|
||||
except ImportError:
|
||||
qt_available = False
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CVTypeID:
|
||||
Volume = "4050"
|
||||
Issue = "4000"
|
||||
|
||||
|
||||
class ComicVineTalkerException(Exception):
|
||||
Unknown = -1
|
||||
Network = -2
|
||||
InvalidKey = 100
|
||||
RateLimit = 107
|
||||
|
||||
def __init__(self, code: int = -1, desc: str = "") -> None:
|
||||
super().__init__()
|
||||
self.desc = desc
|
||||
self.code = code
|
||||
|
||||
def __str__(self) -> str:
|
||||
if self.code in (ComicVineTalkerException.Unknown, ComicVineTalkerException.Network):
|
||||
return self.desc
|
||||
|
||||
return f"CV error #{self.code}: [{self.desc}]. \n"
|
||||
|
||||
|
||||
def list_fetch_complete(url_list: list[str]) -> None:
|
||||
...
|
||||
|
||||
|
||||
def url_fetch_complete(image_url: str, thumb_url: Optional[str]) -> None:
|
||||
...
|
||||
|
||||
|
||||
class ComicVineTalker:
|
||||
logo_url = "http://static.comicvine.com/bundles/comicvinesite/images/logo.png"
|
||||
api_key = ""
|
||||
|
||||
alt_url_list_fetch_complete = list_fetch_complete
|
||||
url_fetch_complete = url_fetch_complete
|
||||
|
||||
@staticmethod
|
||||
def get_rate_limit_message() -> str:
|
||||
if ComicVineTalker.api_key == "":
|
||||
return "Comic Vine rate limit exceeded. You should configue your own Comic Vine API key."
|
||||
|
||||
return "Comic Vine rate limit exceeded. Please wait a bit."
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
||||
self.api_base_url = "https://comicvine.gamespot.com/api"
|
||||
self.wait_for_rate_limit = False
|
||||
|
||||
# key that is registered to comictagger
|
||||
default_api_key = "27431e6787042105bd3e47e169a624521f89f3a4"
|
||||
|
||||
self.issue_id: Optional[int] = None
|
||||
|
||||
if ComicVineTalker.api_key == "":
|
||||
self.api_key = default_api_key
|
||||
else:
|
||||
self.api_key = ComicVineTalker.api_key
|
||||
|
||||
self.log_func: Optional[Callable[[str], None]] = None
|
||||
|
||||
if qt_available:
|
||||
self.nam = QtNetwork.QNetworkAccessManager()
|
||||
|
||||
def set_log_func(self, log_func: Callable[[str], None]) -> None:
|
||||
self.log_func = log_func
|
||||
|
||||
def write_log(self, text: str) -> None:
|
||||
if self.log_func is None:
|
||||
logger.info(text)
|
||||
else:
|
||||
self.log_func(text)
|
||||
|
||||
def parse_date_str(self, date_str: str) -> tuple[Optional[int], Optional[int], Optional[int]]:
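        # e.g. a Comic Vine cover_date of "2012-09-01" comes back as (1, 9, 2012);
        # missing parts are returned as None.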
|
||||
day = None
|
||||
month = None
|
||||
year = None
|
||||
if date_str:
|
||||
parts = date_str.split("-")
|
||||
year = utils.xlate(parts[0], True)
|
||||
if len(parts) > 1:
|
||||
month = utils.xlate(parts[1], True)
|
||||
if len(parts) > 2:
|
||||
day = utils.xlate(parts[2], True)
|
||||
return day, month, year
|
||||
|
||||
def test_key(self, key: str) -> bool:
|
||||
|
||||
try:
|
||||
test_url = self.api_base_url + "/issue/1/?api_key=" + key + "&format=json&field_list=name"
|
||||
|
||||
cv_response: CVResult = requests.get(
|
||||
test_url, headers={"user-agent": "comictagger/" + ctversion.version}
|
||||
).json()
|
||||
|
||||
# Bogus request, but if the key is wrong, you get error 100: "Invalid API Key"
|
||||
return cv_response["status_code"] != 100
|
||||
except:
|
||||
return False
|
||||
|
||||
def get_cv_content(self, url: str, params: dict[str, Any]) -> CVResult:
|
||||
"""
|
||||
        Get the content from the CV server. If we're in "wait mode" and the status code is a rate-limit error,
|
||||
sleep for a bit and retry.
|
||||
"""
|
||||
total_time_waited = 0
|
||||
limit_wait_time = 1
|
||||
counter = 0
|
||||
wait_times = [1, 2, 3, 4]
|
||||
while True:
|
||||
cv_response: CVResult = self.get_url_content(url, params)
|
||||
if self.wait_for_rate_limit and cv_response["status_code"] == ComicVineTalkerException.RateLimit:
|
||||
self.write_log(f"Rate limit encountered. Waiting for {limit_wait_time} minutes\n")
|
||||
time.sleep(limit_wait_time * 60)
|
||||
total_time_waited += limit_wait_time
|
||||
limit_wait_time = wait_times[counter]
|
||||
if counter < 3:
|
||||
counter += 1
|
||||
# don't wait much more than 20 minutes
|
||||
if total_time_waited < 20:
|
||||
continue
|
||||
if cv_response["status_code"] != 1:
|
||||
self.write_log(
|
||||
f"Comic Vine query failed with error #{cv_response['status_code']}: [{cv_response['error']}]. \n"
|
||||
)
|
||||
raise ComicVineTalkerException(cv_response["status_code"], cv_response["error"])
|
||||
|
||||
# it's all good
|
||||
break
|
||||
return cv_response
|
||||
|
||||
def get_url_content(self, url: str, params: dict[str, Any]) -> Any:
|
||||
# connect to server:
|
||||
# if there is a 500 error, try a few more times before giving up
|
||||
# any other error, just bail
|
||||
for tries in range(3):
|
||||
try:
|
||||
resp = requests.get(url, params=params, headers={"user-agent": "comictagger/" + ctversion.version})
|
||||
if resp.status_code == 200:
|
||||
return resp.json()
|
||||
if resp.status_code == 500:
|
||||
self.write_log(f"Try #{tries + 1}: ")
|
||||
time.sleep(1)
|
||||
self.write_log(str(resp.status_code) + "\n")
|
||||
else:
|
||||
break
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
self.write_log(str(e) + "\n")
|
||||
raise ComicVineTalkerException(ComicVineTalkerException.Network, "Network Error!") from e
|
||||
|
||||
raise ComicVineTalkerException(ComicVineTalkerException.Unknown, "Error on Comic Vine server")
|
||||
|
||||
def search_for_series(
|
||||
self, series_name: str, callback: Optional[Callable[[int, int], None]] = None, refresh_cache: bool = False
|
||||
) -> list[CVVolumeResults]:
|
||||
|
||||
        # Sanitize the series name for Comic Vine searching; the Comic Vine search ignores symbols
|
||||
search_series_name = utils.sanitize_title(series_name)
|
||||
|
||||
# before we search online, look in our cache, since we might have done this same search recently
|
||||
cvc = ComicVineCacher()
|
||||
if not refresh_cache:
|
||||
cached_search_results = cvc.get_search_results(series_name)
|
||||
|
||||
if len(cached_search_results) > 0:
|
||||
return cached_search_results
|
||||
|
||||
params = {
|
||||
"api_key": self.api_key,
|
||||
"format": "json",
|
||||
"resources": "volume",
|
||||
"query": search_series_name,
|
||||
"field_list": "volume,name,id,start_year,publisher,image,description,count_of_issues",
|
||||
"page": 1,
|
||||
"limit": 100,
|
||||
}
|
||||
|
||||
cv_response = self.get_cv_content(self.api_base_url + "/search", params)
|
||||
|
||||
search_results: list[CVVolumeResults] = []
|
||||
|
||||
# see http://api.comicvine.com/documentation/#handling_responses
|
||||
|
||||
current_result_count = cv_response["number_of_page_results"]
|
||||
total_result_count = cv_response["number_of_total_results"]
|
||||
|
||||
# 8 Dec 2018 - Comic Vine changed query results again. Terms are now
|
||||
# ORed together, and we get thousands of results. Good news is the
|
||||
# results are sorted by relevance, so we can be smart about halting the search.
|
||||
# 1. Don't fetch more than some sane amount of pages.
|
||||
max_results = 500
|
||||
# 2. Halt when not all of our search terms are present in a result
|
||||
# 3. Halt when the results contain more (plus threshold) words than our search
|
||||
result_word_count_max = len(search_series_name.split()) + 3
|
||||
|
||||
total_result_count = min(total_result_count, max_results)
|
||||
|
||||
if callback is None:
|
||||
self.write_log(
|
||||
f"Found {cv_response['number_of_page_results']} of {cv_response['number_of_total_results']} results\n"
|
||||
)
|
||||
search_results.extend(cast(list[CVVolumeResults], cv_response["results"]))
|
||||
page = 1
|
||||
|
||||
if callback is not None:
|
||||
callback(current_result_count, total_result_count)
|
||||
|
||||
# see if we need to keep asking for more pages...
|
||||
stop_searching = False
|
||||
while current_result_count < total_result_count:
|
||||
|
||||
last_result = search_results[-1]["name"]
|
||||
|
||||
            # Sanitize the series name for Comic Vine searching; the Comic Vine search ignores symbols
            last_result = utils.sanitize_title(last_result)

            # See if the last result's name has all of the search terms.
            # If not, break out of this loop; we're done.
|
||||
for term in search_series_name.split():
|
||||
if term not in last_result.lower():
|
||||
stop_searching = True
|
||||
break
|
||||
|
||||
            # Also, stop searching when the word count of the last result is much longer than our search term list
            if len(last_result.split()) > result_word_count_max:
|
||||
stop_searching = True
|
||||
|
||||
if stop_searching:
|
||||
break
|
||||
|
||||
if callback is None:
|
||||
self.write_log(f"getting another page of results {current_result_count} of {total_result_count}...\n")
|
||||
page += 1
|
||||
|
||||
params["page"] = page
|
||||
cv_response = self.get_cv_content(self.api_base_url + "/search", params)
|
||||
|
||||
search_results.extend(cast(list[CVVolumeResults], cv_response["results"]))
|
||||
current_result_count += cv_response["number_of_page_results"]
|
||||
|
||||
if callback is not None:
|
||||
callback(current_result_count, total_result_count)
|
||||
|
||||
# Remove any search results that don't contain all the search terms (iterate backwards for easy removal)
|
||||
for i in range(len(search_results) - 1, -1, -1):
|
||||
record = search_results[i]
|
||||
            # Sanitize the series name for Comic Vine searching; the Comic Vine search ignores symbols
|
||||
record_name = utils.sanitize_title(record["name"])
|
||||
for term in search_series_name.split():
|
||||
|
||||
if term not in record_name:
|
||||
del search_results[i]
|
||||
break
|
||||
|
||||
# cache these search results
|
||||
cvc.add_search_results(series_name, search_results)
|
||||
|
||||
return search_results
|
||||
|
||||
def fetch_volume_data(self, series_id: int) -> CVVolumeResults:
|
||||
|
||||
# before we search online, look in our cache, since we might already have this info
|
||||
cvc = ComicVineCacher()
|
||||
cached_volume_result = cvc.get_volume_info(series_id)
|
||||
|
||||
if cached_volume_result is not None:
|
||||
return cached_volume_result
|
||||
|
||||
volume_url = self.api_base_url + "/volume/" + CVTypeID.Volume + "-" + str(series_id)
|
||||
|
||||
params = {
|
||||
"api_key": self.api_key,
|
||||
"format": "json",
|
||||
"field_list": "name,id,start_year,publisher,count_of_issues",
|
||||
}
|
||||
cv_response = self.get_cv_content(volume_url, params)
|
||||
|
||||
volume_results = cast(CVVolumeResults, cv_response["results"])
|
||||
|
||||
if volume_results:
|
||||
cvc.add_volume_info(volume_results)
|
||||
|
||||
return volume_results
|
||||
|
||||
def fetch_issues_by_volume(self, series_id: int) -> list[CVIssuesResults]:
|
||||
# before we search online, look in our cache, since we might already have this info
|
||||
cvc = ComicVineCacher()
|
||||
cached_volume_issues_result = cvc.get_volume_issues_info(series_id)
|
||||
|
||||
if cached_volume_issues_result:
|
||||
return cached_volume_issues_result
|
||||
|
||||
params = {
|
||||
"api_key": self.api_key,
|
||||
"filter": "volume:" + str(series_id),
|
||||
"format": "json",
|
||||
"field_list": "id,volume,issue_number,name,image,cover_date,site_detail_url,description",
|
||||
"offset": 0,
|
||||
}
|
||||
cv_response = self.get_cv_content(self.api_base_url + "/issues/", params)
|
||||
|
||||
current_result_count = cv_response["number_of_page_results"]
|
||||
total_result_count = cv_response["number_of_total_results"]
|
||||
|
||||
volume_issues_result = cast(list[CVIssuesResults], cv_response["results"])
|
||||
page = 1
|
||||
offset = 0
|
||||
|
||||
# see if we need to keep asking for more pages...
|
||||
while current_result_count < total_result_count:
|
||||
page += 1
|
||||
offset += cv_response["number_of_page_results"]
|
||||
|
||||
params["offset"] = offset
|
||||
cv_response = self.get_cv_content(self.api_base_url + "/issues/", params)
|
||||
|
||||
volume_issues_result.extend(cast(list[CVIssuesResults], cv_response["results"]))
|
||||
current_result_count += cv_response["number_of_page_results"]
|
||||
|
||||
self.repair_urls(volume_issues_result)
|
||||
|
||||
cvc.add_volume_issues_info(series_id, volume_issues_result)
|
||||
|
||||
return volume_issues_result
|
||||
|
||||
def fetch_issues_by_volume_issue_num_and_year(
|
||||
self, volume_id_list: list[int], issue_number: str, year: Union[str, int, None]
|
||||
) -> list[CVIssuesResults]:
|
||||
volume_filter = ""
|
||||
for vid in volume_id_list:
|
||||
volume_filter += str(vid) + "|"
|
||||
flt = f"volume:{volume_filter},issue_number:{issue_number}"
|
||||
|
||||
int_year = utils.xlate(year, True)
|
||||
if int_year is not None:
|
||||
flt += f",cover_date:{int_year}-1-1|{int_year+1}-1-1"
|
||||
|
||||
params: dict[str, Union[str, int]] = {
|
||||
"api_key": self.api_key,
|
||||
"format": "json",
|
||||
"field_list": "id,volume,issue_number,name,image,cover_date,site_detail_url,description",
|
||||
"filter": flt,
|
||||
}
|
||||
|
||||
cv_response = self.get_cv_content(self.api_base_url + "/issues", params)
|
||||
|
||||
current_result_count = cv_response["number_of_page_results"]
|
||||
total_result_count = cv_response["number_of_total_results"]
|
||||
|
||||
filtered_issues_result = cast(list[CVIssuesResults], cv_response["results"])
|
||||
page = 1
|
||||
offset = 0
|
||||
|
||||
# see if we need to keep asking for more pages...
|
||||
while current_result_count < total_result_count:
|
||||
page += 1
|
||||
offset += cv_response["number_of_page_results"]
|
||||
|
||||
params["offset"] = offset
|
||||
cv_response = self.get_cv_content(self.api_base_url + "/issues/", params)
|
||||
|
||||
filtered_issues_result.extend(cast(list[CVIssuesResults], cv_response["results"]))
|
||||
current_result_count += cv_response["number_of_page_results"]
|
||||
|
||||
self.repair_urls(filtered_issues_result)
|
||||
|
||||
return filtered_issues_result
|
||||
|
||||
def fetch_issue_data(self, series_id: int, issue_number: str, settings: ComicTaggerSettings) -> GenericMetadata:
|
||||
volume_results = self.fetch_volume_data(series_id)
|
||||
issues_list_results = self.fetch_issues_by_volume(series_id)
|
||||
|
||||
f_record = None
|
||||
for record in issues_list_results:
|
||||
if IssueString(issue_number).as_string() is None:
|
||||
issue_number = "1"
|
||||
if IssueString(record["issue_number"]).as_string().lower() == IssueString(issue_number).as_string().lower():
|
||||
f_record = record
|
||||
break
|
||||
|
||||
if f_record is not None:
|
||||
issue_url = self.api_base_url + "/issue/" + CVTypeID.Issue + "-" + str(f_record["id"])
|
||||
params = {"api_key": self.api_key, "format": "json"}
|
||||
cv_response = self.get_cv_content(issue_url, params)
|
||||
issue_results = cast(CVIssueDetailResults, cv_response["results"])
|
||||
|
||||
else:
|
||||
return GenericMetadata()
|
||||
|
||||
# Now, map the Comic Vine data to generic metadata
|
||||
return self.map_cv_data_to_metadata(volume_results, issue_results, settings)
|
||||
|
||||
def fetch_issue_data_by_issue_id(self, issue_id: int, settings: ComicTaggerSettings) -> GenericMetadata:
|
||||
|
||||
issue_url = self.api_base_url + "/issue/" + CVTypeID.Issue + "-" + str(issue_id)
|
||||
params = {"api_key": self.api_key, "format": "json"}
|
||||
cv_response = self.get_cv_content(issue_url, params)
|
||||
|
||||
issue_results = cast(CVIssueDetailResults, cv_response["results"])
|
||||
|
||||
volume_results = self.fetch_volume_data(issue_results["volume"]["id"])
|
||||
|
||||
# Now, map the Comic Vine data to generic metadata
|
||||
md = self.map_cv_data_to_metadata(volume_results, issue_results, settings)
|
||||
md.is_empty = False
|
||||
return md
|
||||
|
||||
def map_cv_data_to_metadata(
|
||||
self, volume_results: CVVolumeResults, issue_results: CVIssueDetailResults, settings: ComicTaggerSettings
|
||||
) -> GenericMetadata:
|
||||
|
||||
# Now, map the Comic Vine data to generic metadata
|
||||
metadata = GenericMetadata()
|
||||
metadata.is_empty = False
|
||||
|
||||
metadata.series = utils.xlate(issue_results["volume"]["name"])
|
||||
metadata.issue = IssueString(issue_results["issue_number"]).as_string()
|
||||
metadata.title = utils.xlate(issue_results["name"])
|
||||
|
||||
if volume_results["publisher"] is not None:
|
||||
metadata.publisher = utils.xlate(volume_results["publisher"]["name"])
|
||||
metadata.day, metadata.month, metadata.year = self.parse_date_str(issue_results["cover_date"])
|
||||
|
||||
metadata.comments = self.cleanup_html(issue_results["description"], settings.remove_html_tables)
|
||||
if settings.use_series_start_as_volume:
|
||||
metadata.volume = int(volume_results["start_year"])
|
||||
|
||||
metadata.notes = f"Tagged with ComicTagger {ctversion.version} using info from Comic Vine on {datetime.now():%Y-%m-%d %H:%M:%S}. [Issue ID {issue_results['id']}]"
|
||||
metadata.web_link = issue_results["site_detail_url"]
|
||||
|
||||
person_credits = issue_results["person_credits"]
|
||||
for person in person_credits:
|
||||
if "role" in person:
|
||||
roles = person["role"].split(",")
|
||||
for role in roles:
|
||||
# can we determine 'primary' from CV??
|
||||
metadata.add_credit(person["name"], role.title().strip(), False)
|
||||
|
||||
character_credits = issue_results["character_credits"]
|
||||
character_list = []
|
||||
for character in character_credits:
|
||||
character_list.append(character["name"])
|
||||
metadata.characters = utils.list_to_string(character_list)
|
||||
|
||||
team_credits = issue_results["team_credits"]
|
||||
team_list = []
|
||||
for team in team_credits:
|
||||
team_list.append(team["name"])
|
||||
metadata.teams = utils.list_to_string(team_list)
|
||||
|
||||
location_credits = issue_results["location_credits"]
|
||||
location_list = []
|
||||
for location in location_credits:
|
||||
location_list.append(location["name"])
|
||||
metadata.locations = utils.list_to_string(location_list)
|
||||
|
||||
story_arc_credits = issue_results["story_arc_credits"]
|
||||
arc_list = []
|
||||
for arc in story_arc_credits:
|
||||
arc_list.append(arc["name"])
|
||||
if len(arc_list) > 0:
|
||||
metadata.story_arc = utils.list_to_string(arc_list)
|
||||
|
||||
return metadata
|
||||
|
||||
def cleanup_html(self, string: str, remove_html_tables: bool) -> str:
|
||||
if string is None:
|
||||
return ""
|
||||
# find any tables
|
||||
soup = BeautifulSoup(string, "html.parser")
|
||||
tables = soup.findAll("table")
|
||||
|
||||
# remove all newlines first
|
||||
string = string.replace("\n", "")
|
||||
|
||||
# put in our own
|
||||
string = string.replace("<br>", "\n")
|
||||
string = string.replace("</li>", "\n")
|
||||
string = string.replace("</p>", "\n\n")
|
||||
string = string.replace("<h1>", "*")
|
||||
string = string.replace("</h1>", "*\n")
|
||||
string = string.replace("<h2>", "*")
|
||||
string = string.replace("</h2>", "*\n")
|
||||
string = string.replace("<h3>", "*")
|
||||
string = string.replace("</h3>", "*\n")
|
||||
string = string.replace("<h4>", "*")
|
||||
string = string.replace("</h4>", "*\n")
|
||||
string = string.replace("<h5>", "*")
|
||||
string = string.replace("</h5>", "*\n")
|
||||
string = string.replace("<h6>", "*")
|
||||
string = string.replace("</h6>", "*\n")
|
||||
|
||||
# remove the tables
|
||||
p = re.compile(r"<table[^<]*?>.*?</table>")
|
||||
if remove_html_tables:
|
||||
string = p.sub("", string)
|
||||
string = string.replace("*List of covers and their creators:*", "")
|
||||
else:
|
||||
string = p.sub("{}", string)
|
||||
|
||||
# now strip all other tags
|
||||
p = re.compile(r"<[^<]*?>")
|
||||
newstring = p.sub("", string)
|
||||
|
||||
        newstring = newstring.replace("&nbsp;", " ")
        newstring = newstring.replace("&amp;", "&")
|
||||
|
||||
newstring = newstring.strip()
|
||||
|
||||
if not remove_html_tables:
|
||||
# now rebuild the tables into text from BSoup
|
||||
try:
|
||||
table_strings = []
|
||||
for table in tables:
|
||||
rows = []
|
||||
hdrs = []
|
||||
col_widths = []
|
||||
for hdr in table.findAll("th"):
|
||||
item = hdr.string.strip()
|
||||
hdrs.append(item)
|
||||
col_widths.append(len(item))
|
||||
rows.append(hdrs)
|
||||
|
||||
for row in table.findAll("tr"):
|
||||
cols = []
|
||||
col = row.findAll("td")
|
||||
i = 0
|
||||
for c in col:
|
||||
item = c.string.strip()
|
||||
cols.append(item)
|
||||
if len(item) > col_widths[i]:
|
||||
col_widths[i] = len(item)
|
||||
i += 1
|
||||
if len(cols) != 0:
|
||||
rows.append(cols)
|
||||
# now we have the data, make it into text
|
||||
fmtstr = ""
|
||||
for w in col_widths:
|
||||
fmtstr += " {{:{}}}|".format(w + 1)
|
||||
width = sum(col_widths) + len(col_widths) * 2
|
||||
table_text = ""
|
||||
counter = 0
|
||||
for row in rows:
|
||||
table_text += fmtstr.format(*row) + "\n"
|
||||
if counter == 0 and len(hdrs) != 0:
|
||||
table_text += "-" * width + "\n"
|
||||
counter += 1
|
||||
|
||||
table_strings.append(table_text)
|
||||
|
||||
newstring = newstring.format(*table_strings)
|
||||
except:
|
||||
# we caught an error rebuilding the table.
|
||||
# just bail and remove the formatting
|
||||
logger.exception("table parse error")
|
||||
newstring.replace("{}", "")
|
||||
|
||||
return newstring
|
||||
|
||||
def fetch_issue_date(self, issue_id: int) -> tuple[Optional[int], Optional[int]]:
|
||||
details = self.fetch_issue_select_details(issue_id)
|
||||
_, month, year = self.parse_date_str(details["cover_date"] or "")
|
||||
return month, year
|
||||
|
||||
def fetch_issue_cover_urls(self, issue_id: int) -> tuple[Optional[str], Optional[str]]:
|
||||
details = self.fetch_issue_select_details(issue_id)
|
||||
return details["image_url"], details["thumb_image_url"]
|
||||
|
||||
def fetch_issue_page_url(self, issue_id: int) -> Optional[str]:
|
||||
details = self.fetch_issue_select_details(issue_id)
|
||||
return details["site_detail_url"]
|
||||
|
||||
def fetch_issue_select_details(self, issue_id: int) -> SelectDetails:
|
||||
cached_details = self.fetch_cached_issue_select_details(issue_id)
|
||||
if cached_details["image_url"] is not None:
|
||||
return cached_details
|
||||
|
||||
issue_url = self.api_base_url + "/issue/" + CVTypeID.Issue + "-" + str(issue_id)
|
||||
|
||||
params = {"api_key": self.api_key, "format": "json", "field_list": "image,cover_date,site_detail_url"}
|
||||
|
||||
cv_response = self.get_cv_content(issue_url, params)
|
||||
results = cast(CVIssueDetailResults, cv_response["results"])
|
||||
|
||||
details: SelectDetails = {
|
||||
"image_url": results["image"]["super_url"],
|
||||
"thumb_image_url": results["image"]["thumb_url"],
|
||||
"cover_date": results["cover_date"],
|
||||
"site_detail_url": results["site_detail_url"],
|
||||
}
|
||||
|
||||
if (
|
||||
details["image_url"] is not None
|
||||
and details["thumb_image_url"] is not None
|
||||
and details["cover_date"] is not None
|
||||
and details["site_detail_url"] is not None
|
||||
):
|
||||
self.cache_issue_select_details(
|
||||
issue_id,
|
||||
details["image_url"],
|
||||
details["thumb_image_url"],
|
||||
details["cover_date"],
|
||||
details["site_detail_url"],
|
||||
)
|
||||
return details
|
||||
|
||||
def fetch_cached_issue_select_details(self, issue_id: int) -> SelectDetails:
|
||||
|
||||
# before we search online, look in our cache, since we might already have this info
|
||||
cvc = ComicVineCacher()
|
||||
return cvc.get_issue_select_details(issue_id)
|
||||
|
||||
def cache_issue_select_details(
|
||||
self, issue_id: int, image_url: str, thumb_url: str, cover_date: str, page_url: str
|
||||
) -> None:
|
||||
cvc = ComicVineCacher()
|
||||
cvc.add_issue_select_details(issue_id, image_url, thumb_url, cover_date, page_url)
|
||||
|
||||
def fetch_alternate_cover_urls(self, issue_id: int, issue_page_url: str) -> list[str]:
|
||||
url_list = self.fetch_cached_alternate_cover_urls(issue_id)
|
||||
if url_list is not None:
|
||||
return url_list
|
||||
|
||||
# scrape the CV issue page URL to get the alternate cover URLs
|
||||
content = requests.get(issue_page_url, headers={"user-agent": "comictagger/" + ctversion.version}).text
|
||||
alt_cover_url_list = self.parse_out_alt_cover_urls(content)
|
||||
|
||||
# cache this alt cover URL list
|
||||
self.cache_alternate_cover_urls(issue_id, alt_cover_url_list)
|
||||
|
||||
return alt_cover_url_list
|
||||
|
||||
def parse_out_alt_cover_urls(self, page_html: str) -> list[str]:
|
||||
soup = BeautifulSoup(page_html, "html.parser")
|
||||
|
||||
alt_cover_url_list = []
|
||||
|
||||
# Using knowledge of the layout of the Comic Vine issue page here:
|
||||
# look for the divs that are in the classes 'imgboxart' and 'issue-cover'
|
||||
div_list = soup.find_all("div")
|
||||
covers_found = 0
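        # The first qualifying cover image on the page is the primary cover, so only
        # the second and later ones are collected as alternates.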
|
||||
for d in div_list:
|
||||
if "class" in d.attrs:
|
||||
c = d["class"]
|
||||
if "imgboxart" in c and "issue-cover" in c:
|
||||
if d.img["src"].startswith("http"):
|
||||
covers_found += 1
|
||||
if covers_found != 1:
|
||||
alt_cover_url_list.append(d.img["src"])
|
||||
elif d.img["data-src"].startswith("http"):
|
||||
covers_found += 1
|
||||
if covers_found != 1:
|
||||
alt_cover_url_list.append(d.img["data-src"])
|
||||
|
||||
return alt_cover_url_list
|
||||
|
||||
def fetch_cached_alternate_cover_urls(self, issue_id: int) -> list[str]:
|
||||
|
||||
# before we search online, look in our cache, since we might already have this info
|
||||
cvc = ComicVineCacher()
|
||||
url_list = cvc.get_alt_covers(issue_id)
|
||||
|
||||
return url_list
|
||||
|
||||
def cache_alternate_cover_urls(self, issue_id: int, url_list: list[str]) -> None:
|
||||
cvc = ComicVineCacher()
|
||||
cvc.add_alt_covers(issue_id, url_list)
|
||||
|
||||
def async_fetch_issue_cover_urls(self, issue_id: int) -> None:
|
||||
|
||||
self.issue_id = issue_id
|
||||
details = self.fetch_cached_issue_select_details(issue_id)
|
||||
if details["image_url"] is not None:
|
||||
ComicVineTalker.url_fetch_complete(details["image_url"], details["thumb_image_url"])
|
||||
return
|
||||
|
||||
issue_url = (
|
||||
self.api_base_url
|
||||
+ "/issue/"
|
||||
+ CVTypeID.Issue
|
||||
+ "-"
|
||||
+ str(issue_id)
|
||||
+ "/?api_key="
|
||||
+ self.api_key
|
||||
+ "&format=json&field_list=image,cover_date,site_detail_url"
|
||||
)
|
||||
|
||||
self.nam.finished.connect(self.async_fetch_issue_cover_url_complete)
|
||||
self.nam.get(QtNetwork.QNetworkRequest(QtCore.QUrl(issue_url)))
|
||||
|
||||
def async_fetch_issue_cover_url_complete(self, reply: "QtNetwork.QNetworkReply") -> None:
|
||||
# read in the response
|
||||
data = reply.readAll()
|
||||
|
||||
try:
|
||||
cv_response = cast(CVResult, json.loads(bytes(data)))
|
||||
except Exception:
|
||||
logger.exception("Comic Vine query failed to get JSON data\n%s", str(data))
|
||||
return
|
||||
|
||||
if cv_response["status_code"] != 1:
|
||||
logger.error("Comic Vine query failed with error: [%s]. ", cv_response["error"])
|
||||
return
|
||||
|
||||
result = cast(CVIssuesResults, cv_response["results"])
|
||||
|
||||
image_url = result["image"]["super_url"]
|
||||
thumb_url = result["image"]["thumb_url"]
|
||||
cover_date = result["cover_date"]
|
||||
page_url = result["site_detail_url"]
|
||||
|
||||
self.cache_issue_select_details(cast(int, self.issue_id), image_url, thumb_url, cover_date, page_url)
|
||||
|
||||
ComicVineTalker.url_fetch_complete(image_url, thumb_url)
|
||||
|
||||
def async_fetch_alternate_cover_urls(self, issue_id: int, issue_page_url: str) -> None:
|
||||
# This async version requires the issue page url to be provided!
|
||||
self.issue_id = issue_id
|
||||
url_list = self.fetch_cached_alternate_cover_urls(issue_id)
|
||||
if url_list is not None:
|
||||
ComicVineTalker.alt_url_list_fetch_complete(url_list)
|
||||
return
|
||||
|
||||
self.nam.finished.connect(self.async_fetch_alternate_cover_urls_complete)
|
||||
self.nam.get(QtNetwork.QNetworkRequest(QtCore.QUrl(str(issue_page_url))))
|
||||
|
||||
def async_fetch_alternate_cover_urls_complete(self, reply: "QtNetwork.QNetworkReply") -> None:
|
||||
# read in the response
|
||||
html = str(reply.readAll())
|
||||
alt_cover_url_list = self.parse_out_alt_cover_urls(html)
|
||||
|
||||
# cache this alt cover URL list
|
||||
self.cache_alternate_cover_urls(cast(int, self.issue_id), alt_cover_url_list)
|
||||
|
||||
ComicVineTalker.alt_url_list_fetch_complete(alt_cover_url_list)
|
||||
|
||||
def repair_urls(
|
||||
self, issue_list: Union[list[CVIssuesResults], list[CVVolumeResults], list[CVIssueDetailResults]]
|
||||
) -> None:
|
||||
# make sure there are URLs for the image fields
|
||||
for issue in issue_list:
|
||||
if issue["image"] is None:
|
||||
issue["image"] = {
|
||||
"super_url": ComicVineTalker.logo_url,
|
||||
"thumb_url": ComicVineTalker.logo_url,
|
||||
}
|
||||
comictaggerlib/coverimagewidget.py (new file)
@@ -0,0 +1,326 @@
|
||||
"""A PyQt5 widget to display cover images
|
||||
|
||||
Display cover images from either a local archive, or from Comic Vine.
|
||||
TODO: This should be re-factored using subclasses!
|
||||
"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
from typing import Callable, Optional, Union, cast
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
from comictaggerlib.comicvinetalker import ComicVineTalker
|
||||
from comictaggerlib.imagefetcher import ImageFetcher
|
||||
from comictaggerlib.imagepopup import ImagePopup
|
||||
from comictaggerlib.pageloader import PageLoader
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import get_qimage_from_data, reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def clickable(widget: QtWidgets.QWidget) -> QtCore.pyqtBoundSignal:
|
||||
"""Allow a label to be clickable"""
|
||||
|
||||
class Filter(QtCore.QObject):
|
||||
|
||||
dblclicked = QtCore.pyqtSignal()
|
||||
|
||||
def eventFilter(self, obj: QtCore.QObject, event: QtCore.QEvent) -> bool:
|
||||
if obj == widget:
|
||||
if event.type() == QtCore.QEvent.Type.MouseButtonDblClick:
|
||||
self.dblclicked.emit()
|
||||
return True
|
||||
return False
|
||||
|
||||
flt = Filter(widget)
|
||||
widget.installEventFilter(flt)
|
||||
return flt.dblclicked
|
||||
|
||||
|
||||
class Signal(QtCore.QObject):
|
||||
alt_url_list_fetch_complete = QtCore.pyqtSignal(list)
|
||||
url_fetch_complete = QtCore.pyqtSignal(str, str)
|
||||
image_fetch_complete = QtCore.pyqtSignal(QtCore.QByteArray)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
list_fetch: Callable[[list[str]], None],
|
||||
url_fetch: Callable[[str, str], None],
|
||||
image_fetch: Callable[[bytes], None],
|
||||
) -> None:
|
||||
super().__init__()
|
||||
self.alt_url_list_fetch_complete.connect(list_fetch)
|
||||
self.url_fetch_complete.connect(url_fetch)
|
||||
self.image_fetch_complete.connect(image_fetch)
|
||||
|
||||
def emit_list(self, url_list: list[str]) -> None:
|
||||
self.alt_url_list_fetch_complete.emit(url_list)
|
||||
|
||||
def emit_url(self, image_url: str, thumb_url: Optional[str]) -> None:
|
||||
self.url_fetch_complete.emit(image_url, thumb_url)
|
||||
|
||||
def emit_image(self, image_data: Union[bytes, QtCore.QByteArray]) -> None:
|
||||
self.image_fetch_complete.emit(image_data)
|
||||
|
||||
|
||||
class CoverImageWidget(QtWidgets.QWidget):
|
||||
ArchiveMode = 0
|
||||
AltCoverMode = 1
|
||||
URLMode = 1
|
||||
DataMode = 3
|
||||
|
||||
def __init__(self, parent: QtWidgets.QWidget, mode: int, expand_on_click: bool = True) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("coverimagewidget.ui"), self)
|
||||
|
||||
reduce_widget_font_size(self.label)
|
||||
|
||||
self.sig = Signal(
|
||||
self.alt_cover_url_list_fetch_complete, self.primary_url_fetch_complete, self.cover_remote_fetch_complete
|
||||
)
|
||||
|
||||
self.mode: int = mode
|
||||
self.page_loader: Optional[PageLoader] = None
|
||||
self.showControls = True
|
||||
|
||||
self.current_pixmap = QtGui.QPixmap()
|
||||
|
||||
self.comic_archive: Optional[ComicArchive] = None
|
||||
self.issue_id: Optional[int] = None
|
||||
self.url_list: list[str] = []
|
||||
if self.page_loader is not None:
|
||||
self.page_loader.abandoned = True
|
||||
self.page_loader = None
|
||||
self.imageIndex = -1
|
||||
self.imageCount = 1
|
||||
self.imageData = bytes()
|
||||
|
||||
self.btnLeft.setIcon(QtGui.QIcon(ComicTaggerSettings.get_graphic("left.png")))
|
||||
self.btnRight.setIcon(QtGui.QIcon(ComicTaggerSettings.get_graphic("right.png")))
|
||||
|
||||
self.btnLeft.clicked.connect(self.decrement_image)
|
||||
self.btnRight.clicked.connect(self.increment_image)
|
||||
if expand_on_click:
|
||||
clickable(self.lblImage).connect(self.show_popup)
|
||||
else:
|
||||
self.lblImage.setToolTip("")
|
||||
|
||||
self.update_content()
|
||||
|
||||
def reset_widget(self) -> None:
|
||||
self.comic_archive = None
|
||||
self.issue_id = None
|
||||
self.url_list = []
|
||||
if self.page_loader is not None:
|
||||
self.page_loader.abandoned = True
|
||||
self.page_loader = None
|
||||
self.imageIndex = -1
|
||||
self.imageCount = 1
|
||||
self.imageData = bytes()
|
||||
|
||||
def clear(self) -> None:
|
||||
self.reset_widget()
|
||||
self.update_content()
|
||||
|
||||
def increment_image(self) -> None:
|
||||
self.imageIndex += 1
|
||||
if self.imageIndex == self.imageCount:
|
||||
self.imageIndex = 0
|
||||
self.update_content()
|
||||
|
||||
def decrement_image(self) -> None:
|
||||
self.imageIndex -= 1
|
||||
if self.imageIndex == -1:
|
||||
self.imageIndex = self.imageCount - 1
|
||||
self.update_content()
|
||||
|
||||
def set_archive(self, ca: ComicArchive, page: int = 0) -> None:
|
||||
if self.mode == CoverImageWidget.ArchiveMode:
|
||||
self.reset_widget()
|
||||
self.comic_archive = ca
|
||||
self.imageIndex = page
|
||||
self.imageCount = ca.get_number_of_pages()
|
||||
self.update_content()
|
||||
|
||||
def set_url(self, url: str) -> None:
|
||||
if self.mode == CoverImageWidget.URLMode:
|
||||
self.reset_widget()
|
||||
self.update_content()
|
||||
|
||||
self.url_list = [url]
|
||||
self.imageIndex = 0
|
||||
self.imageCount = 1
|
||||
self.update_content()
|
||||
|
||||
def set_issue_id(self, issue_id: int) -> None:
|
||||
if self.mode == CoverImageWidget.AltCoverMode:
|
||||
self.reset_widget()
|
||||
self.update_content()
|
||||
self.issue_id = issue_id
|
||||
|
||||
comic_vine = ComicVineTalker()
|
||||
ComicVineTalker.url_fetch_complete = self.sig.emit_url
|
||||
comic_vine.async_fetch_issue_cover_urls(self.issue_id)
|
||||
|
||||
def set_image_data(self, image_data: bytes) -> None:
|
||||
if self.mode == CoverImageWidget.DataMode:
|
||||
self.reset_widget()
|
||||
|
||||
if image_data:
|
||||
self.imageIndex = 0
|
||||
self.imageData = image_data
|
||||
else:
|
||||
self.imageIndex = -1
|
||||
|
||||
self.update_content()
|
||||
|
||||
def primary_url_fetch_complete(self, primary_url: str, thumb_url: Optional[str]) -> None:
|
||||
self.url_list.append(str(primary_url))
|
||||
self.imageIndex = 0
|
||||
self.imageCount = len(self.url_list)
|
||||
self.update_content()
|
||||
|
||||
# defer the alt cover search
|
||||
QtCore.QTimer.singleShot(1, self.start_alt_cover_search)
|
||||
|
||||
def start_alt_cover_search(self) -> None:
|
||||
|
||||
if self.issue_id is not None:
|
||||
# now we need to get the list of alt cover URLs
|
||||
self.label.setText("Searching for alt. covers...")
|
||||
|
||||
# page URL should already be cached, so no need to defer
|
||||
comic_vine = ComicVineTalker()
|
||||
issue_page_url = comic_vine.fetch_issue_page_url(self.issue_id)
|
||||
ComicVineTalker.alt_url_list_fetch_complete = self.sig.emit_list
|
||||
comic_vine.async_fetch_alternate_cover_urls(utils.xlate(self.issue_id), cast(str, issue_page_url))
|
||||
|
||||
def alt_cover_url_list_fetch_complete(self, url_list: list[str]) -> None:
|
||||
if len(url_list) > 0:
|
||||
self.url_list.extend(url_list)
|
||||
self.imageCount = len(self.url_list)
|
||||
self.update_controls()
|
||||
|
||||
def set_page(self, pagenum: int) -> None:
|
||||
if self.mode == CoverImageWidget.ArchiveMode:
|
||||
self.imageIndex = pagenum
|
||||
self.update_content()
|
||||
|
||||
def update_content(self) -> None:
|
||||
self.update_image()
|
||||
self.update_controls()
|
||||
|
||||
def update_image(self) -> None:
|
||||
if self.imageIndex == -1:
|
||||
self.load_default()
|
||||
elif self.mode in [CoverImageWidget.AltCoverMode, CoverImageWidget.URLMode]:
|
||||
self.load_url()
|
||||
elif self.mode == CoverImageWidget.DataMode:
|
||||
self.cover_remote_fetch_complete(self.imageData)
|
||||
else:
|
||||
self.load_page()
|
||||
|
||||
def update_controls(self) -> None:
|
||||
if not self.showControls or self.mode == CoverImageWidget.DataMode:
|
||||
self.btnLeft.hide()
|
||||
self.btnRight.hide()
|
||||
self.label.hide()
|
||||
return
|
||||
|
||||
if self.imageIndex == -1 or self.imageCount == 1:
|
||||
self.btnLeft.setEnabled(False)
|
||||
self.btnRight.setEnabled(False)
|
||||
self.btnLeft.hide()
|
||||
self.btnRight.hide()
|
||||
else:
|
||||
self.btnLeft.setEnabled(True)
|
||||
self.btnRight.setEnabled(True)
|
||||
self.btnLeft.show()
|
||||
self.btnRight.show()
|
||||
|
||||
if self.imageIndex == -1 or self.imageCount == 1:
|
||||
self.label.setText("")
|
||||
elif self.mode == CoverImageWidget.AltCoverMode:
|
||||
self.label.setText(f"Cover {self.imageIndex + 1} (of {self.imageCount})")
|
||||
else:
|
||||
self.label.setText(f"Page {self.imageIndex + 1} (of {self.imageCount})")
|
||||
|
||||
def load_url(self) -> None:
|
||||
self.load_default()
|
||||
self.cover_fetcher = ImageFetcher()
|
||||
ImageFetcher.image_fetch_complete = self.sig.emit_image
|
||||
self.cover_fetcher.fetch(self.url_list[self.imageIndex])
|
||||
|
||||
# called when the image is done loading from internet
|
||||
def cover_remote_fetch_complete(self, image_data: bytes) -> None:
|
||||
img = get_qimage_from_data(image_data)
|
||||
self.current_pixmap = QtGui.QPixmap.fromImage(img)
|
||||
self.set_display_pixmap()
|
||||
|
||||
def load_page(self) -> None:
|
||||
if self.comic_archive is not None:
|
||||
if self.page_loader is not None:
|
||||
self.page_loader.abandoned = True
|
||||
self.page_loader = PageLoader(self.comic_archive, self.imageIndex)
|
||||
self.page_loader.loadComplete.connect(self.page_load_complete)
|
||||
self.page_loader.start()
|
||||
|
||||
def page_load_complete(self, image_data: bytes) -> None:
|
||||
img = get_qimage_from_data(image_data)
|
||||
self.current_pixmap = QtGui.QPixmap.fromImage(img)
|
||||
self.set_display_pixmap()
|
||||
self.page_loader = None
|
||||
|
||||
def load_default(self) -> None:
|
||||
self.current_pixmap = QtGui.QPixmap(ComicTaggerSettings.get_graphic("nocover.png"))
|
||||
self.set_display_pixmap()
|
||||
|
||||
def resizeEvent(self, resize_event: QtGui.QResizeEvent) -> None:
|
||||
if self.current_pixmap is not None:
|
||||
self.set_display_pixmap()
|
||||
|
||||
def set_display_pixmap(self) -> None:
|
||||
"""The deltas let us know what the new width and height of the label will be"""
|
||||
|
||||
new_h = self.frame.height()
|
||||
new_w = self.frame.width()
|
||||
frame_w = self.frame.width()
|
||||
frame_h = self.frame.height()
|
||||
|
||||
new_h -= 4
|
||||
new_w -= 4
|
||||
|
||||
new_h = max(new_h, 0)
|
||||
new_w = max(new_w, 0)
|
||||
|
||||
# scale the pixmap to fit in the frame
|
||||
scaled_pixmap = self.current_pixmap.scaled(new_w, new_h, QtCore.Qt.AspectRatioMode.KeepAspectRatio)
|
||||
self.lblImage.setPixmap(scaled_pixmap)
|
||||
|
||||
# move and resize the label to be centered in the frame
|
||||
img_w = scaled_pixmap.width()
|
||||
img_h = scaled_pixmap.height()
|
||||
self.lblImage.resize(img_w, img_h)
|
||||
self.lblImage.move(int((frame_w - img_w) / 2), int((frame_h - img_h) / 2))
|
||||
|
||||
def show_popup(self) -> None:
|
||||
ImagePopup(self, self.current_pixmap)
|
||||
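For readers skimming the diff, the following is a minimal, self-contained sketch of the same double-click event-filter pattern that clickable() implements above. The class and label names here are illustrative only and are not part of this changeset.

import sys
from PyQt5 import QtCore, QtWidgets

class DoubleClickFilter(QtCore.QObject):
    # Stand-alone equivalent of the Filter class built inside clickable()
    dblclicked = QtCore.pyqtSignal()

    def __init__(self, widget: QtWidgets.QWidget) -> None:
        super().__init__(widget)  # parent to the widget so the filter stays alive
        self._widget = widget
        widget.installEventFilter(self)

    def eventFilter(self, obj: QtCore.QObject, event: QtCore.QEvent) -> bool:
        if obj is self._widget and event.type() == QtCore.QEvent.Type.MouseButtonDblClick:
            self.dblclicked.emit()
            return True
        return False

if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    label = QtWidgets.QLabel("Double-click me")
    DoubleClickFilter(label).dblclicked.connect(lambda: print("double-clicked"))
    label.show()
    sys.exit(app.exec_())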
96
comictaggerlib/crediteditorwindow.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""A PyQT4 dialog to edit credits"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from PyQt5 import QtWidgets, uic
|
||||
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CreditEditorWindow(QtWidgets.QDialog):
|
||||
ModeEdit = 0
|
||||
ModeNew = 1
|
||||
|
||||
def __init__(self, parent: QtWidgets.QWidget, mode: int, role: str, name: str, primary: bool) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("crediteditorwindow.ui"), self)
|
||||
|
||||
self.mode = mode
|
||||
|
||||
if self.mode == self.ModeEdit:
|
||||
self.setWindowTitle("Edit Credit")
|
||||
else:
|
||||
self.setWindowTitle("New Credit")
|
||||
|
||||
# Add the entries to the role combobox
|
||||
self.cbRole.addItem("")
|
||||
self.cbRole.addItem("Writer")
|
||||
self.cbRole.addItem("Artist")
|
||||
self.cbRole.addItem("Penciller")
|
||||
self.cbRole.addItem("Inker")
|
||||
self.cbRole.addItem("Colorist")
|
||||
self.cbRole.addItem("Letterer")
|
||||
self.cbRole.addItem("Cover Artist")
|
||||
self.cbRole.addItem("Editor")
|
||||
self.cbRole.addItem("Other")
|
||||
self.cbRole.addItem("Plotter")
|
||||
self.cbRole.addItem("Scripter")
|
||||
|
||||
self.leName.setText(name)
|
||||
|
||||
if role is not None and role != "":
|
||||
i = self.cbRole.findText(role)
|
||||
if i == -1:
|
||||
self.cbRole.setEditText(role)
|
||||
else:
|
||||
self.cbRole.setCurrentIndex(i)
|
||||
|
||||
self.cbPrimary.setChecked(primary)
|
||||
|
||||
self.cbRole.currentIndexChanged.connect(self.role_changed)
|
||||
self.cbRole.editTextChanged.connect(self.role_changed)
|
||||
|
||||
self.update_primary_button()
|
||||
|
||||
def update_primary_button(self) -> None:
|
||||
enabled = self.current_role_can_be_primary()
|
||||
self.cbPrimary.setEnabled(enabled)
|
||||
|
||||
def current_role_can_be_primary(self) -> bool:
|
||||
role = self.cbRole.currentText()
|
||||
if str(role).lower() == "writer" or str(role).lower() == "artist":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def role_changed(self, s: Any) -> None:
|
||||
self.update_primary_button()
|
||||
|
||||
def get_credits(self) -> tuple[str, str, bool]:
|
||||
primary = self.current_role_can_be_primary() and self.cbPrimary.isChecked()
|
||||
return self.cbRole.currentText(), self.leName.text(), primary
|
||||
|
||||
def accept(self) -> None:
|
||||
if self.cbRole.currentText() == "" or self.leName.text() == "":
|
||||
QtWidgets.QMessageBox.warning(self, "Whoops", "You need to enter both role and name for a credit.")
|
||||
else:
|
||||
QtWidgets.QDialog.accept(self)
|
||||
62
comictaggerlib/exportwindow.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""A PyQT4 dialog to confirm and set options for export to zip"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ExportConflictOpts:
|
||||
dontCreate = 1
|
||||
overwrite = 2
|
||||
createUnique = 3
|
||||
|
||||
|
||||
class ExportWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget, settings: ComicTaggerSettings, msg: str) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("exportwindow.ui"), self)
|
||||
self.label.setText(msg)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(self.windowFlags() & ~QtCore.Qt.WindowType.WindowContextHelpButtonHint)
|
||||
)
|
||||
|
||||
self.settings = settings
|
||||
|
||||
self.cbxDeleteOriginal.setChecked(False)
|
||||
self.cbxAddToList.setChecked(True)
|
||||
self.radioDontCreate.setChecked(True)
|
||||
|
||||
self.deleteOriginal = False
|
||||
self.addToList = True
|
||||
self.fileConflictBehavior = ExportConflictOpts.dontCreate
|
||||
|
||||
def accept(self) -> None:
|
||||
QtWidgets.QDialog.accept(self)
|
||||
|
||||
self.deleteOriginal = self.cbxDeleteOriginal.isChecked()
|
||||
self.addToList = self.cbxAddToList.isChecked()
|
||||
if self.radioDontCreate.isChecked():
|
||||
self.fileConflictBehavior = ExportConflictOpts.dontCreate
|
||||
elif self.radioCreateNew.isChecked():
|
||||
self.fileConflictBehavior = ExportConflictOpts.createUnique
|
||||
188
comictaggerlib/filerenamer.py
Normal file
@@ -0,0 +1,188 @@
|
||||
"""Functions for renaming files based on metadata"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import calendar
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import string
|
||||
import sys
|
||||
from typing import Any, Optional, cast
|
||||
|
||||
from pathvalidate import sanitize_filename
|
||||
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comicapi.issuestring import IssueString
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MetadataFormatter(string.Formatter):
|
||||
def __init__(self, smart_cleanup: bool = False, platform: str = "auto") -> None:
|
||||
super().__init__()
|
||||
self.smart_cleanup = smart_cleanup
|
||||
self.platform = platform
|
||||
|
||||
def format_field(self, value: Any, format_spec: str) -> str:
|
||||
if value is None or value == "":
|
||||
return ""
|
||||
return cast(str, super().format_field(value, format_spec))
|
||||
|
||||
def _vformat(
|
||||
self,
|
||||
format_string: str,
|
||||
args: list[Any],
|
||||
kwargs: dict[str, Any],
|
||||
used_args: set[Any],
|
||||
recursion_depth: int,
|
||||
auto_arg_index: int = 0,
|
||||
) -> tuple[str, int]:
|
||||
if recursion_depth < 0:
|
||||
raise ValueError("Max string recursion exceeded")
|
||||
result = []
|
||||
lstrip = False
|
||||
for literal_text, field_name, format_spec, conversion in self.parse(format_string):
|
||||
|
||||
# output the literal text
|
||||
if literal_text:
|
||||
if lstrip:
|
||||
literal_text = literal_text.lstrip("-_)}]#")
|
||||
if self.smart_cleanup:
|
||||
lspace = literal_text[0].isspace() if literal_text else False
|
||||
rspace = literal_text[-1].isspace() if literal_text else False
|
||||
literal_text = " ".join(literal_text.split())
|
||||
if literal_text == "":
|
||||
literal_text = " "
|
||||
else:
|
||||
if lspace:
|
||||
literal_text = " " + literal_text
|
||||
if rspace:
|
||||
literal_text += " "
|
||||
result.append(literal_text)
|
||||
|
||||
lstrip = False
|
||||
# if there's a field, output it
|
||||
if field_name is not None and field_name != "":
|
||||
field_name = field_name.lower()
|
||||
# this is some markup, find the object and do the formatting
|
||||
|
||||
# handle arg indexing when empty field_names are given.
|
||||
if field_name == "":
|
||||
if auto_arg_index is False:
|
||||
raise ValueError("cannot switch from manual field specification to automatic field numbering")
|
||||
field_name = str(auto_arg_index)
|
||||
auto_arg_index += 1
|
||||
elif field_name.isdigit():
|
||||
if auto_arg_index:
|
||||
raise ValueError("cannot switch from manual field specification to automatic field numbering")
|
||||
# disable auto arg incrementing, if it gets used later on, then an exception will be raised
|
||||
auto_arg_index = False
|
||||
|
||||
# given the field_name, find the object it references
|
||||
# and the argument it came from
|
||||
obj, arg_used = self.get_field(field_name, args, kwargs)
|
||||
used_args.add(arg_used)
|
||||
|
||||
# do any conversion on the resulting object
|
||||
obj = self.convert_field(obj, conversion) # type: ignore
|
||||
|
||||
# expand the format spec, if needed
|
||||
format_spec, auto_arg_index = self._vformat(
|
||||
cast(str, format_spec), args, kwargs, used_args, recursion_depth - 1, auto_arg_index=auto_arg_index
|
||||
)
|
||||
|
||||
# format the object and append to the result
|
||||
fmt_obj = self.format_field(obj, format_spec)
|
||||
if fmt_obj == "" and len(result) > 0 and self.smart_cleanup:
|
||||
lstrip = True
|
||||
if result:
|
||||
result[-1] = result[-1].rstrip("-_({[#")
|
||||
if self.smart_cleanup:
|
||||
fmt_obj = " ".join(fmt_obj.split())
|
||||
fmt_obj = str(sanitize_filename(fmt_obj, platform=self.platform))
|
||||
result.append(fmt_obj)
|
||||
|
||||
return "".join(result), auto_arg_index
|
||||
|
||||
|
||||
class FileRenamer:
|
||||
def __init__(self, metadata: Optional[GenericMetadata], platform: str = "auto") -> None:
|
||||
self.template = "{publisher}/{series}/{series} v{volume} #{issue} (of {issue_count}) ({year})"
|
||||
self.smart_cleanup = True
|
||||
self.issue_zero_padding = 3
|
||||
self.metadata = metadata or GenericMetadata()
|
||||
self.move = False
|
||||
self.platform = platform
|
||||
|
||||
def set_metadata(self, metadata: GenericMetadata) -> None:
|
||||
self.metadata = metadata
|
||||
|
||||
def set_issue_zero_padding(self, count: int) -> None:
|
||||
self.issue_zero_padding = count
|
||||
|
||||
def set_smart_cleanup(self, on: bool) -> None:
|
||||
self.smart_cleanup = on
|
||||
|
||||
def set_template(self, template: str) -> None:
|
||||
self.template = template
|
||||
|
||||
def determine_name(self, ext: str) -> str:
|
||||
class Default(dict):
|
||||
def __missing__(self, key: str) -> str:
|
||||
return "{" + key + "}"
|
||||
|
||||
md = self.metadata
|
||||
|
||||
# padding for issue
|
||||
md.issue = IssueString(md.issue).as_string(pad=self.issue_zero_padding)
|
||||
|
||||
template = self.template
|
||||
|
||||
new_name = ""
|
||||
|
||||
fmt = MetadataFormatter(self.smart_cleanup, platform=self.platform)
|
||||
md_dict = vars(md)
|
||||
for role in ["writer", "penciller", "inker", "colorist", "letterer", "cover artist", "editor"]:
|
||||
md_dict[role] = md.get_primary_credit(role)
|
||||
|
||||
if (isinstance(md.month, int) or isinstance(md.month, str) and md.month.isdigit()) and 0 < int(md.month) < 13:
|
||||
md_dict["month_name"] = calendar.month_name[int(md.month)]
|
||||
md_dict["month_abbr"] = calendar.month_abbr[int(md.month)]
|
||||
else:
|
||||
md_dict["month_name"] = ""
|
||||
md_dict["month_abbr"] = ""
|
||||
|
||||
for Component in pathlib.PureWindowsPath(template).parts:
|
||||
if (
|
||||
self.platform.lower() in ["universal", "windows"] or sys.platform.lower() in ["windows"]
|
||||
) and self.smart_cleanup:
|
||||
# colons get special treatment
|
||||
Component = Component.replace(": ", " - ")
|
||||
Component = Component.replace(":", "-")
|
||||
|
||||
new_basename = str(
|
||||
sanitize_filename(fmt.vformat(Component, args=[], kwargs=Default(md_dict)), platform=self.platform)
|
||||
).strip()
|
||||
new_name = os.path.join(new_name, new_basename)
|
||||
|
||||
new_name += ext
|
||||
new_basename += ext
|
||||
|
||||
# remove padding
|
||||
md.issue = IssueString(md.issue).as_string()
|
||||
if self.move:
|
||||
return new_name.strip()
|
||||
return new_basename.strip()
|
||||
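As a rough usage sketch of the renaming flow above (a sketch only: it assumes comicapi and comictaggerlib are importable, the metadata values are invented, and the exact output depends on the smart-cleanup rules):

from comicapi.genericmetadata import GenericMetadata
from comictaggerlib.filerenamer import FileRenamer

md = GenericMetadata()
md.series = "Example Series"   # illustrative values, not real metadata
md.issue = "1"
md.volume = 2
md.year = 2012
md.publisher = "Example Comics"

renamer = FileRenamer(md, platform="universal")
renamer.set_template("{publisher}/{series}/{series} #{issue} ({year})")
renamer.set_issue_zero_padding(3)
renamer.move = True  # ask for the full relative path, not just the basename
print(renamer.determine_name(".cbz"))
# expected to be something like: Example Comics/Example Series/Example Series #001 (2012).cbz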
408
comictaggerlib/fileselectionlist.py
Normal file
@@ -0,0 +1,408 @@
|
||||
"""A PyQt5 widget for managing list of comic archive files"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Callable, List, Optional, cast
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import center_window_on_parent, reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FileTableWidgetItem(QtWidgets.QTableWidgetItem):
|
||||
def __lt__(self, other: object) -> bool:
|
||||
return self.data(QtCore.Qt.ItemDataRole.UserRole) < other.data(QtCore.Qt.ItemDataRole.UserRole) # type: ignore
|
||||
|
||||
|
||||
class FileInfo:
|
||||
def __init__(self, ca: ComicArchive) -> None:
|
||||
self.ca: ComicArchive = ca
|
||||
|
||||
|
||||
class FileSelectionList(QtWidgets.QWidget):
|
||||
selectionChanged = QtCore.pyqtSignal(QtCore.QVariant)
|
||||
listCleared = QtCore.pyqtSignal()
|
||||
|
||||
fileColNum = 0
|
||||
CRFlagColNum = 1
|
||||
CBLFlagColNum = 2
|
||||
typeColNum = 3
|
||||
readonlyColNum = 4
|
||||
folderColNum = 5
|
||||
dataColNum = fileColNum
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
parent: QtWidgets.QWidget,
|
||||
settings: ComicTaggerSettings,
|
||||
dirty_flag_verification: Callable[[str, str], bool],
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("fileselectionlist.ui"), self)
|
||||
|
||||
self.settings = settings
|
||||
|
||||
reduce_widget_font_size(self.twList)
|
||||
|
||||
self.twList.setColumnCount(6)
|
||||
self.twList.currentItemChanged.connect(self.current_item_changed_cb)
|
||||
|
||||
self.currentItem = None
|
||||
self.setContextMenuPolicy(QtCore.Qt.ContextMenuPolicy.ActionsContextMenu)
|
||||
self.dirty_flag = False
|
||||
|
||||
select_all_action = QtWidgets.QAction("Select All", self)
|
||||
remove_action = QtWidgets.QAction("Remove Selected Items", self)
|
||||
self.separator = QtWidgets.QAction("", self)
|
||||
self.separator.setSeparator(True)
|
||||
|
||||
select_all_action.setShortcut("Ctrl+A")
|
||||
remove_action.setShortcut("Ctrl+X")
|
||||
|
||||
select_all_action.triggered.connect(self.select_all)
|
||||
remove_action.triggered.connect(self.remove_selection)
|
||||
|
||||
self.addAction(select_all_action)
|
||||
self.addAction(remove_action)
|
||||
self.addAction(self.separator)
|
||||
|
||||
self.dirty_flag_verification = dirty_flag_verification
|
||||
|
||||
def get_sorting(self) -> tuple[int, int]:
|
||||
col = self.twList.horizontalHeader().sortIndicatorSection()
|
||||
order = self.twList.horizontalHeader().sortIndicatorOrder()
|
||||
return int(col), int(order)
|
||||
|
||||
def set_sorting(self, col: int, order: QtCore.Qt.SortOrder) -> None:
|
||||
self.twList.horizontalHeader().setSortIndicator(col, order)
|
||||
|
||||
def add_app_action(self, action: QtWidgets.QAction) -> None:
|
||||
self.insertAction(QtWidgets.QAction(), action)
|
||||
|
||||
def set_modified_flag(self, modified: bool) -> None:
|
||||
self.dirty_flag = modified
|
||||
|
||||
def select_all(self) -> None:
|
||||
self.twList.setRangeSelected(QtWidgets.QTableWidgetSelectionRange(0, 0, self.twList.rowCount() - 1, 5), True)
|
||||
|
||||
def deselect_all(self) -> None:
|
||||
self.twList.setRangeSelected(QtWidgets.QTableWidgetSelectionRange(0, 0, self.twList.rowCount() - 1, 5), False)
|
||||
|
||||
def remove_paths(self, file_list: list[str]) -> None:
|
||||
flist = file_list
|
||||
current_removed = False
|
||||
self.twList.setSortingEnabled(False)
|
||||
for row in reversed(range(self.twList.rowCount())):
|
||||
|
||||
ca = self.get_archive_by_row(row)
|
||||
|
||||
if ca and str(ca.path.absolute()) in flist:
|
||||
flist.remove(str(ca.path.absolute()))
|
||||
self.twList.removeRow(row)
|
||||
if row == self.twList.currentRow():
|
||||
current_removed = True
|
||||
|
||||
self.twList.setSortingEnabled(True)
|
||||
|
||||
self.items_removed(current_removed)
|
||||
|
||||
def items_removed(self, current_removed: bool):
|
||||
if self.twList.rowCount() > 0 and current_removed:
|
||||
# since on a removal, we select row 0, make sure callback occurs if
|
||||
# we're already there
|
||||
if self.twList.currentRow() == 0:
|
||||
self.current_item_changed_cb(self.twList.currentItem(), None)
|
||||
self.twList.selectRow(0)
|
||||
elif self.twList.rowCount() <= 0:
|
||||
self.listCleared.emit()
|
||||
|
||||
def remove_archive_list(self, ca_list: list[ComicArchive]) -> None:
|
||||
self.twList.setSortingEnabled(False)
|
||||
current_removed = False
|
||||
for ca in ca_list:
|
||||
for row in range(self.twList.rowCount()):
|
||||
row_ca = self.get_archive_by_row(row)
|
||||
if row_ca == ca:
|
||||
if row == self.twList.currentRow():
|
||||
current_removed = True
|
||||
self.twList.removeRow(row)
|
||||
break
|
||||
self.twList.setSortingEnabled(True)
|
||||
self.items_removed(current_removed)
|
||||
|
||||
def get_archive_by_row(self, row: int) -> Optional[ComicArchive]:
|
||||
if row >= 0:
|
||||
fi: FileInfo = self.twList.item(row, FileSelectionList.dataColNum).data(QtCore.Qt.ItemDataRole.UserRole)
|
||||
return fi.ca
|
||||
return None
|
||||
|
||||
def get_current_archive(self) -> Optional[ComicArchive]:
|
||||
return self.get_archive_by_row(self.twList.currentRow())
|
||||
|
||||
def remove_selection(self) -> None:
|
||||
row_list = []
|
||||
for item in self.twList.selectedItems():
|
||||
if item.column() == 0:
|
||||
row_list.append(item.row())
|
||||
|
||||
if len(row_list) == 0:
|
||||
return
|
||||
|
||||
if self.twList.currentRow() in row_list:
|
||||
if not self.dirty_flag_verification(
|
||||
"Remove Archive", "If you close this archive, data in the form will be lost. Are you sure?"
|
||||
):
|
||||
return
|
||||
|
||||
row_list.sort()
|
||||
row_list.reverse()
|
||||
|
||||
self.twList.currentItemChanged.disconnect(self.current_item_changed_cb)
|
||||
self.twList.setSortingEnabled(False)
|
||||
|
||||
for i in row_list:
|
||||
self.twList.removeRow(i)
|
||||
|
||||
self.twList.setSortingEnabled(True)
|
||||
self.twList.currentItemChanged.connect(self.current_item_changed_cb)
|
||||
|
||||
self.items_removed(True)
|
||||
|
||||
def add_path_list(self, pathlist: list[str]) -> None:
|
||||
|
||||
filelist = utils.get_recursive_filelist(pathlist)
|
||||
# we now have a list of files to add
|
||||
|
||||
# Prog dialog on Linux flakes out for small range, so scale up
|
||||
progdialog = QtWidgets.QProgressDialog("", "Cancel", 0, len(filelist), parent=self)
|
||||
progdialog.setWindowTitle("Adding Files")
|
||||
progdialog.setWindowModality(QtCore.Qt.WindowModality.ApplicationModal)
|
||||
progdialog.setMinimumDuration(300)
|
||||
center_window_on_parent(progdialog)
|
||||
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
first_added = None
|
||||
self.twList.setSortingEnabled(False)
|
||||
for idx, f in enumerate(filelist):
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
if progdialog.wasCanceled():
|
||||
break
|
||||
progdialog.setValue(idx + 1)
|
||||
progdialog.setLabelText(f)
|
||||
center_window_on_parent(progdialog)
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
row = self.add_path_item(f)
|
||||
if first_added is None and row is not None:
|
||||
first_added = row
|
||||
|
||||
progdialog.hide()
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
|
||||
if first_added is not None:
|
||||
self.twList.selectRow(first_added)
|
||||
else:
|
||||
if len(pathlist) == 1 and os.path.isfile(pathlist[0]):
|
||||
QtWidgets.QMessageBox.information(
|
||||
self, "File Open", "Selected file doesn't seem to be a comic archive."
|
||||
)
|
||||
else:
|
||||
QtWidgets.QMessageBox.information(self, "File/Folder Open", "No readable comic archives were found.")
|
||||
|
||||
self.twList.setSortingEnabled(True)
|
||||
|
||||
# Adjust column size
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.setColumnWidth(FileSelectionList.CRFlagColNum, 35)
|
||||
self.twList.setColumnWidth(FileSelectionList.CBLFlagColNum, 35)
|
||||
self.twList.setColumnWidth(FileSelectionList.readonlyColNum, 35)
|
||||
self.twList.setColumnWidth(FileSelectionList.typeColNum, 45)
|
||||
if self.twList.columnWidth(FileSelectionList.fileColNum) > 250:
|
||||
self.twList.setColumnWidth(FileSelectionList.fileColNum, 250)
|
||||
if self.twList.columnWidth(FileSelectionList.folderColNum) > 200:
|
||||
self.twList.setColumnWidth(FileSelectionList.folderColNum, 200)
|
||||
|
||||
def is_list_dupe(self, path: str) -> bool:
|
||||
return self.get_current_list_row(path) >= 0
|
||||
|
||||
def get_current_list_row(self, path: str) -> int:
|
||||
r = 0
|
||||
while r < self.twList.rowCount():
|
||||
ca = cast(ComicArchive, self.get_archive_by_row(r))
|
||||
if str(ca.path) == path:  # ca.path is a pathlib.Path; compare as strings
|
||||
return r
|
||||
r = r + 1
|
||||
|
||||
return -1
|
||||
|
||||
def add_path_item(self, path: str) -> int:
|
||||
path = str(path)
|
||||
path = os.path.abspath(path)
|
||||
|
||||
if self.is_list_dupe(path):
|
||||
return self.get_current_list_row(path)
|
||||
|
||||
ca = ComicArchive(path, self.settings.rar_exe_path, ComicTaggerSettings.get_graphic("nocover.png"))
|
||||
|
||||
if ca.seems_to_be_a_comic_archive():
|
||||
row: int = self.twList.rowCount()
|
||||
self.twList.insertRow(row)
|
||||
|
||||
fi = FileInfo(ca)
|
||||
|
||||
filename_item = QtWidgets.QTableWidgetItem()
|
||||
folder_item = QtWidgets.QTableWidgetItem()
|
||||
cix_item = FileTableWidgetItem()
|
||||
cbi_item = FileTableWidgetItem()
|
||||
readonly_item = FileTableWidgetItem()
|
||||
type_item = QtWidgets.QTableWidgetItem()
|
||||
|
||||
filename_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
filename_item.setData(QtCore.Qt.ItemDataRole.UserRole, fi)
|
||||
self.twList.setItem(row, FileSelectionList.fileColNum, filename_item)
|
||||
|
||||
folder_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, FileSelectionList.folderColNum, folder_item)
|
||||
|
||||
type_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, FileSelectionList.typeColNum, type_item)
|
||||
|
||||
cix_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
cix_item.setTextAlignment(QtCore.Qt.AlignmentFlag.AlignHCenter)
|
||||
self.twList.setItem(row, FileSelectionList.CRFlagColNum, cix_item)
|
||||
|
||||
cbi_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
cbi_item.setTextAlignment(QtCore.Qt.AlignmentFlag.AlignHCenter)
|
||||
self.twList.setItem(row, FileSelectionList.CBLFlagColNum, cbi_item)
|
||||
|
||||
readonly_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
readonly_item.setTextAlignment(QtCore.Qt.AlignmentFlag.AlignHCenter)
|
||||
self.twList.setItem(row, FileSelectionList.readonlyColNum, readonly_item)
|
||||
|
||||
self.update_row(row)
|
||||
|
||||
return row
|
||||
return -1
|
||||
|
||||
def update_row(self, row: int) -> None:
|
||||
if row >= 0:
|
||||
fi: FileInfo = self.twList.item(row, FileSelectionList.dataColNum).data(QtCore.Qt.ItemDataRole.UserRole)
|
||||
|
||||
filename_item = self.twList.item(row, FileSelectionList.fileColNum)
|
||||
folder_item = self.twList.item(row, FileSelectionList.folderColNum)
|
||||
cix_item = self.twList.item(row, FileSelectionList.CRFlagColNum)
|
||||
cbi_item = self.twList.item(row, FileSelectionList.CBLFlagColNum)
|
||||
type_item = self.twList.item(row, FileSelectionList.typeColNum)
|
||||
readonly_item = self.twList.item(row, FileSelectionList.readonlyColNum)
|
||||
|
||||
item_text = os.path.split(fi.ca.path)[0]
|
||||
folder_item.setText(item_text)
|
||||
folder_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
|
||||
item_text = os.path.split(fi.ca.path)[1]
|
||||
filename_item.setText(item_text)
|
||||
filename_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
|
||||
if fi.ca.is_sevenzip():
|
||||
item_text = "7Z"
|
||||
elif fi.ca.is_zip():
|
||||
item_text = "ZIP"
|
||||
elif fi.ca.is_rar():
|
||||
item_text = "RAR"
|
||||
else:
|
||||
item_text = ""
|
||||
type_item.setText(item_text)
|
||||
type_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
|
||||
if fi.ca.has_cix():
|
||||
cix_item.setCheckState(QtCore.Qt.CheckState.Checked)
|
||||
cix_item.setData(QtCore.Qt.ItemDataRole.UserRole, True)
|
||||
else:
|
||||
cix_item.setData(QtCore.Qt.ItemDataRole.UserRole, False)
|
||||
cix_item.setCheckState(QtCore.Qt.CheckState.Unchecked)
|
||||
|
||||
if fi.ca.has_cbi():
|
||||
cbi_item.setCheckState(QtCore.Qt.CheckState.Checked)
|
||||
cbi_item.setData(QtCore.Qt.ItemDataRole.UserRole, True)
|
||||
else:
|
||||
cbi_item.setData(QtCore.Qt.ItemDataRole.UserRole, False)
|
||||
cbi_item.setCheckState(QtCore.Qt.CheckState.Unchecked)
|
||||
|
||||
if not fi.ca.is_writable():
|
||||
readonly_item.setCheckState(QtCore.Qt.CheckState.Checked)
|
||||
readonly_item.setData(QtCore.Qt.ItemDataRole.UserRole, True)
|
||||
else:
|
||||
readonly_item.setData(QtCore.Qt.ItemDataRole.UserRole, False)
|
||||
readonly_item.setCheckState(QtCore.Qt.CheckState.Unchecked)
|
||||
|
||||
# Reading these will force them into the ComicArchive's cache
|
||||
fi.ca.read_cix()
|
||||
fi.ca.has_cbi()
|
||||
|
||||
def get_archive_list(self, all_comics=False) -> List[ComicArchive]:
|
||||
ca_list: List[ComicArchive] = []
|
||||
for r in range(self.twList.rowCount()):
|
||||
item = self.twList.item(r, FileSelectionList.dataColNum)
|
||||
if item.isSelected() or all_comics:
|
||||
fi: FileInfo = item.data(QtCore.Qt.ItemDataRole.UserRole)
|
||||
ca_list.append(fi.ca)
|
||||
|
||||
return ca_list
|
||||
|
||||
def update_current_row(self) -> None:
|
||||
self.update_row(self.twList.currentRow())
|
||||
|
||||
def update_selected_rows(self) -> None:
|
||||
self.twList.setSortingEnabled(False)
|
||||
for r in range(self.twList.rowCount()):
|
||||
item = self.twList.item(r, FileSelectionList.dataColNum)
|
||||
if item.isSelected():
|
||||
self.update_row(r)
|
||||
self.twList.setSortingEnabled(True)
|
||||
|
||||
def current_item_changed_cb(self, curr: Optional[QtCore.QModelIndex], prev: Optional[QtCore.QModelIndex]) -> None:
|
||||
if curr is not None:
|
||||
new_idx = curr.row()
|
||||
old_idx = -1
|
||||
if prev is not None:
|
||||
old_idx = prev.row()
|
||||
|
||||
if old_idx == new_idx:
|
||||
return
|
||||
|
||||
# don't allow change if modified
|
||||
if prev is not None and new_idx != old_idx:
|
||||
if not self.dirty_flag_verification(
|
||||
"Change Archive", "If you change archives now, data in the form will be lost. Are you sure?"
|
||||
):
|
||||
self.twList.currentItemChanged.disconnect(self.current_item_changed_cb)
|
||||
self.twList.setCurrentItem(prev)
|
||||
self.twList.currentItemChanged.connect(self.current_item_changed_cb)
|
||||
# Need to defer this revert selection, for some reason
|
||||
QtCore.QTimer.singleShot(1, self.revert_selection)
|
||||
return
|
||||
|
||||
fi = self.twList.item(new_idx, FileSelectionList.dataColNum).data(QtCore.Qt.ItemDataRole.UserRole)
|
||||
self.selectionChanged.emit(QtCore.QVariant(fi))
|
||||
|
||||
def revert_selection(self) -> None:
|
||||
self.twList.selectRow(self.twList.currentRow())
|
||||
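A hedged, GUI-free sketch of the core check that add_path_item() performs when deciding whether a path is worth listing. The folder path is a placeholder, and passing an empty rar_exe_path is an assumption about ComicArchive's constructor defaults.

from comicapi import utils
from comicapi.comicarchive import ComicArchive

for path in utils.get_recursive_filelist(["/path/to/comics"]):
    ca = ComicArchive(path, "")  # rar_exe_path left empty here (assumption)
    if ca.seems_to_be_a_comic_archive():
        kind = "7Z" if ca.is_sevenzip() else "ZIP" if ca.is_zip() else "RAR" if ca.is_rar() else "?"
        print(kind, path)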
|
BIN  comictaggerlib/graphics/autotag.png   Normal file  After Size: 13 KiB
BIN  comictaggerlib/graphics/left.png      Normal file  After Size: 1.2 KiB
BIN  comictaggerlib/graphics/longbox.png   Normal file  After Size: 46 KiB
BIN  comictaggerlib/graphics/parse.png     Normal file  After Size: 4.2 KiB
BIN  comictaggerlib/graphics/popup_bg.png  Normal file  After Size: 362 B
BIN  comictaggerlib/graphics/right.png     Normal file  After Size: 1.2 KiB
(ten other graphics files in this changeset are unchanged in size: 29 KiB, 56 KiB, 16 KiB, 3.3 KiB, 3.2 KiB, 49 KiB, 5.5 KiB, 3.0 KiB, 11 KiB, 56 KiB; their filenames were not captured in this view)
174
comictaggerlib/imagefetcher.py
Normal file
@@ -0,0 +1,174 @@
|
||||
"""A class to manage fetching and caching of images by URL"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3 as lite
|
||||
import tempfile
|
||||
from typing import Union
|
||||
|
||||
import requests
|
||||
|
||||
from comictaggerlib import ctversion
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
try:
|
||||
from PyQt5 import QtCore, QtNetwork
|
||||
|
||||
qt_available = True
|
||||
except ImportError:
|
||||
qt_available = False
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImageFetcherException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def fetch_complete(image_data: "Union[bytes, QtCore.QByteArray]") -> None:
|
||||
...
|
||||
|
||||
|
||||
class ImageFetcher:
|
||||
|
||||
image_fetch_complete = fetch_complete
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
||||
self.settings_folder = ComicTaggerSettings.get_settings_folder()
|
||||
self.db_file = os.path.join(self.settings_folder, "image_url_cache.db")
|
||||
self.cache_folder = os.path.join(self.settings_folder, "image_cache")
|
||||
|
||||
self.user_data = None
|
||||
self.fetched_url = ""
|
||||
|
||||
if not os.path.exists(self.db_file):
|
||||
self.create_image_db()
|
||||
|
||||
if qt_available:
|
||||
self.nam = QtNetwork.QNetworkAccessManager()
|
||||
|
||||
def clear_cache(self) -> None:
|
||||
os.unlink(self.db_file)
|
||||
if os.path.isdir(self.cache_folder):
|
||||
shutil.rmtree(self.cache_folder)
|
||||
|
||||
def fetch(self, url: str, blocking: bool = False) -> bytes:
|
||||
"""
|
||||
If called with blocking=True, this will block until the image is
|
||||
fetched.
|
||||
If called with blocking=False, this will run the fetch in the
|
||||
background, and emit a signal when done
|
||||
"""
|
||||
|
||||
self.fetched_url = url
|
||||
|
||||
# first look in the DB
|
||||
image_data = self.get_image_from_cache(url)
|
||||
if blocking or not qt_available:
|
||||
if not image_data:
|
||||
try:
|
||||
image_data = requests.get(url, headers={"user-agent": "comictagger/" + ctversion.version}).content
|
||||
except Exception as e:
|
||||
logger.exception("Fetching url failed: %s")
|
||||
raise ImageFetcherException("Network Error!") from e
|
||||
|
||||
# save the image to the cache
|
||||
self.add_image_to_cache(self.fetched_url, image_data)
|
||||
return image_data
|
||||
|
||||
if qt_available:
|
||||
# if we found it, just emit the signal asap
|
||||
if image_data:
|
||||
ImageFetcher.image_fetch_complete(QtCore.QByteArray(image_data))
|
||||
return bytes()
|
||||
|
||||
# didn't find it. look online
|
||||
self.nam.finished.connect(self.finish_request)
|
||||
self.nam.get(QtNetwork.QNetworkRequest(QtCore.QUrl(url)))
|
||||
|
||||
# we'll get called back when done...
|
||||
return bytes()
|
||||
|
||||
def finish_request(self, reply: "QtNetwork.QNetworkReply") -> None:
|
||||
# read in the image data
|
||||
logger.debug("request finished")
|
||||
image_data = reply.readAll()
|
||||
|
||||
# save the image to the cache
|
||||
self.add_image_to_cache(self.fetched_url, image_data)
|
||||
|
||||
ImageFetcher.image_fetch_complete(image_data)
|
||||
|
||||
def create_image_db(self) -> None:
|
||||
|
||||
# this will wipe out any existing version
|
||||
open(self.db_file, "wb").close()
|
||||
|
||||
# wipe any existing image cache folder too
|
||||
if os.path.isdir(self.cache_folder):
|
||||
shutil.rmtree(self.cache_folder)
|
||||
os.makedirs(self.cache_folder)
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
# create tables
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
|
||||
cur.execute("CREATE TABLE Images(url TEXT,filename TEXT,timestamp TEXT,PRIMARY KEY (url))")
|
||||
|
||||
def add_image_to_cache(self, url: str, image_data: "Union[bytes, QtCore.QByteArray]") -> None:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
|
||||
timestamp = datetime.datetime.now()
|
||||
|
||||
tmp_fd, filename = tempfile.mkstemp(dir=self.cache_folder, prefix="img")
|
||||
with os.fdopen(tmp_fd, "w+b") as f:
|
||||
f.write(bytes(image_data))
|
||||
|
||||
cur.execute("INSERT or REPLACE INTO Images VALUES(?, ?, ?)", (url, filename, timestamp))
|
||||
|
||||
def get_image_from_cache(self, url: str) -> bytes:
|
||||
|
||||
con = lite.connect(self.db_file)
|
||||
with con:
|
||||
cur = con.cursor()
|
||||
|
||||
cur.execute("SELECT filename FROM Images WHERE url=?", [url])
|
||||
row = cur.fetchone()
|
||||
|
||||
if row is None:
|
||||
return bytes()
|
||||
|
||||
filename = row[0]
|
||||
image_data = bytes()
|
||||
|
||||
try:
|
||||
with open(filename, "rb") as f:
|
||||
image_data = f.read()
|
||||
|
||||
except IOError:
|
||||
pass
|
||||
|
||||
return image_data
|
||||
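To illustrate the blocking code path documented in fetch() above, a small sketch (the URL is a placeholder, a network connection is assumed, and results land in the image cache under the settings folder as described):

from comictaggerlib.imagefetcher import ImageFetcher, ImageFetcherException

fetcher = ImageFetcher()
try:
    # blocking=True checks the cache first, then falls back to requests, and returns bytes
    data = fetcher.fetch("https://example.com/cover.jpg", blocking=True)
    print(f"fetched {len(data)} bytes")
except ImageFetcherException:
    print("network error; nothing was cached")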
189
comictaggerlib/imagehasher.py
Executable file
@@ -0,0 +1,189 @@
|
||||
"""A class to manage creating image content hashes, and calculate hamming distances"""
|
||||
|
||||
# Copyright 2013 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import io
|
||||
import logging
|
||||
from functools import reduce
|
||||
from typing import Optional, TypeVar
|
||||
|
||||
try:
|
||||
from PIL import Image
|
||||
|
||||
pil_available = True
|
||||
except ImportError:
|
||||
pil_available = False
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImageHasher:
|
||||
def __init__(self, path: Optional[str] = None, data: bytes = bytes(), width: int = 8, height: int = 8) -> None:
|
||||
self.width = width
|
||||
self.height = height
|
||||
|
||||
if path is None and not data:
|
||||
raise IOError
|
||||
|
||||
try:
|
||||
if path is not None:
|
||||
self.image = Image.open(path)
|
||||
else:
|
||||
self.image = Image.open(io.BytesIO(data))
|
||||
except Exception:
|
||||
logger.exception("Image data seems corrupted!")
|
||||
# just generate a bogus image
|
||||
self.image = Image.new("L", (1, 1))
|
||||
|
||||
def average_hash(self) -> int:
|
||||
try:
|
||||
image = self.image.resize((self.width, self.height), Image.ANTIALIAS).convert("L")
|
||||
except Exception:
|
||||
logger.exception("average_hash error")
|
||||
return 0
|
||||
|
||||
pixels = list(image.getdata())
|
||||
avg = sum(pixels) / len(pixels)
|
||||
|
||||
def compare_value_to_avg(i: int) -> int:
|
||||
return 1 if i > avg else 0
|
||||
|
||||
bitlist = list(map(compare_value_to_avg, pixels))
|
||||
|
||||
# build up an int value from the bit list, one bit at a time
|
||||
def set_bit(x: int, idx_val: tuple[int, int]) -> int:
|
||||
(idx, val) = idx_val
|
||||
return x | (val << idx)
|
||||
|
||||
result = reduce(set_bit, enumerate(bitlist), 0)
|
||||
|
||||
return result
|
||||
|
||||
def average_hash2(self) -> None:
|
||||
"""
|
||||
# Got this one from somewhere on the net. Not a clue how the 'convolve2d' works!
|
||||
|
||||
from numpy import array
|
||||
from scipy.signal import convolve2d
|
||||
|
||||
im = self.image.resize((self.width, self.height), Image.ANTIALIAS).convert('L')
|
||||
|
||||
in_data = array((im.getdata())).reshape(self.width, self.height)
|
||||
filt = array([[0,1,0],[1,-4,1],[0,1,0]])
|
||||
filt_data = convolve2d(in_data,filt,mode='same',boundary='symm').flatten()
|
||||
|
||||
result = reduce(lambda x, (y, z): x | (z << y),
|
||||
enumerate(map(lambda i: 0 if i < 0 else 1, filt_data)),
|
||||
0)
|
||||
#print("{0:016x}".format(result))
|
||||
return result
|
||||
"""
|
||||
|
||||
def dct_average_hash(self) -> None:
|
||||
"""
|
||||
# Algorithm source: http://syntaxcandy.blogspot.com/2012/08/perceptual-hash.html
|
||||
|
||||
1. Reduce size. Like Average Hash, pHash starts with a small image.
|
||||
However, the image is larger than 8x8; 32x32 is a good size. This
|
||||
is really done to simplify the DCT computation and not because it
|
||||
is needed to reduce the high frequencies.
|
||||
|
||||
2. Reduce color. The image is reduced to a grayscale just to further
|
||||
simplify the number of computations.
|
||||
|
||||
3. Compute the DCT. The DCT separates the image into a collection of
|
||||
frequencies and scalars. While JPEG uses an 8x8 DCT, this algorithm
|
||||
uses a 32x32 DCT.
|
||||
|
||||
4. Reduce the DCT. This is the magic step. While the DCT is 32x32,
|
||||
just keep the top-left 8x8. Those represent the lowest frequencies in
|
||||
the picture.
|
||||
|
||||
5. Compute the average value. Like the Average Hash, compute the mean DCT
|
||||
value (using only the 8x8 DCT low-frequency values and excluding the first
|
||||
term since the DC coefficient can be significantly different from the other
|
||||
values and will throw off the average). Thanks to David Starkweather for the
|
||||
added information about pHash. He wrote: "the dct hash is based on the low 2D
|
||||
DCT coefficients starting at the second from lowest, leaving out the first DC
|
||||
term. This excludes completely flat image information (i.e. solid colors) from
|
||||
being included in the hash description."
|
||||
|
||||
6. Further reduce the DCT. This is the magic step. Set the 64 hash bits to 0 or
|
||||
1 depending on whether each of the 64 DCT values is above or below the average
|
||||
value. The result doesn't tell us the actual low frequencies; it just tells us
|
||||
the very-rough relative scale of the frequencies to the mean. The result will not
|
||||
vary as long as the overall structure of the image remains the same; this can
|
||||
survive gamma and color histogram adjustments without a problem.
|
||||
|
||||
7. Construct the hash. Set the 64 bits into a 64-bit integer. The order does not
|
||||
matter, just as long as you are consistent.
|
||||
|
||||
|
||||
import numpy
|
||||
import scipy.fftpack
|
||||
numpy.set_printoptions(threshold=10000, linewidth=200, precision=2, suppress=True)
|
||||
|
||||
# Step 1,2
|
||||
im = self.image.resize((32, 32), Image.ANTIALIAS).convert("L")
|
||||
in_data = numpy.asarray(im)
|
||||
|
||||
# Step 3
|
||||
dct = scipy.fftpack.dct(in_data.astype(float))
|
||||
|
||||
# Step 4
|
||||
# Just skip the top and left rows when slicing, as suggested somewhere else...
|
||||
lofreq_dct = dct[1:9, 1:9].flatten()
|
||||
|
||||
# Step 5
|
||||
avg = (lofreq_dct.sum()) / (lofreq_dct.size)
|
||||
median = numpy.median(lofreq_dct)
|
||||
|
||||
thresh = avg
|
||||
|
||||
# Step 6
|
||||
def compare_value_to_thresh(i):
|
||||
return (1 if i > thresh else 0)
|
||||
|
||||
bitlist = map(compare_value_to_thresh, lofreq_dct)
|
||||
|
||||
#Step 7
|
||||
def set_bit(x, (idx, val)):
|
||||
return (x | (val << idx))
|
||||
|
||||
result = reduce(set_bit, enumerate(bitlist), long(0))
|
||||
|
||||
|
||||
#print("{0:016x}".format(result))
|
||||
return result
|
||||
"""
|
||||
|
||||
# accepts 2 hashes (longs or hex strings) and returns the hamming distance
|
||||
|
||||
T = TypeVar("T", int, str)
|
||||
|
||||
@staticmethod
|
||||
def hamming_distance(h1: T, h2: T) -> int:
|
||||
if isinstance(h1, int) or isinstance(h2, int):
|
||||
n1 = h1
|
||||
n2 = h2
|
||||
else:
|
||||
# convert hex strings to ints
|
||||
n1 = int(h1, 16)
|
||||
n2 = int(h2, 16)
|
||||
|
||||
# xor the two numbers
|
||||
n = n1 ^ n2
|
||||
|
||||
# count up the 1's in the binary string
|
||||
return sum(b == "1" for b in bin(n)[2:])
|
||||
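A short sketch of how the hashes above are meant to be compared; the image paths are placeholders and Pillow is required. The thresholds referenced in the comment come from the IssueIdentifier defaults later in this diff.

from comictaggerlib.imagehasher import ImageHasher

hash_a = ImageHasher(path="cover_a.jpg").average_hash()
hash_b = ImageHasher(path="cover_b.jpg").average_hash()

# Lower hamming distance means more similar images; IssueIdentifier below
# treats <= 8 as a very strong match and uses 16 as its default match threshold.
distance = ImageHasher.hamming_distance(hash_a, hash_b)
print(f"{hash_a:016x} vs {hash_b:016x} -> hamming distance {distance}")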
85
comictaggerlib/imagepopup.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""A PyQT4 widget to display a popup image"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, sip, uic
|
||||
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImagePopup(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget, image_pixmap: QtGui.QPixmap) -> None:
|
||||
super(ImagePopup, self).__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("imagepopup.ui"), self)
|
||||
|
||||
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
|
||||
|
||||
self.setWindowFlags(QtCore.Qt.WindowType.Popup)
|
||||
self.setWindowState(QtCore.Qt.WindowState.WindowFullScreen)
|
||||
|
||||
self.imagePixmap = image_pixmap
|
||||
|
||||
screen_size = QtGui.QGuiApplication.primaryScreen().geometry()
|
||||
|
||||
self.resize(screen_size.width(), screen_size.height())
|
||||
self.move(0, 0)
|
||||
|
||||
# This is a total hack. Uses a snapshot of the desktop, and overlays a
|
||||
# translucent screen over it. Probably can do it better by setting opacity of a widget
|
||||
# TODO: macOS denies this
|
||||
screen = QtWidgets.QApplication.primaryScreen()
|
||||
self.desktopBg = screen.grabWindow(sip.voidptr(0), 0, 0, screen_size.width(), screen_size.height())
|
||||
bg = QtGui.QPixmap(ComicTaggerSettings.get_graphic("popup_bg.png"))
|
||||
self.clientBgPixmap = bg.scaled(screen_size.width(), screen_size.height())
|
||||
self.setMask(self.clientBgPixmap.mask())
|
||||
|
||||
self.apply_image_pixmap()
|
||||
self.showFullScreen()
|
||||
self.raise_()
|
||||
QtWidgets.QApplication.restoreOverrideCursor()
|
||||
|
||||
def paintEvent(self, event: QtGui.QPaintEvent) -> None:
|
||||
painter = QtGui.QPainter(self)
|
||||
painter.setRenderHint(QtGui.QPainter.RenderHint.Antialiasing)
|
||||
painter.drawPixmap(0, 0, self.desktopBg)
|
||||
painter.drawPixmap(0, 0, self.clientBgPixmap)
|
||||
painter.end()
|
||||
|
||||
def apply_image_pixmap(self) -> None:
|
||||
win_h = self.height()
|
||||
win_w = self.width()
|
||||
|
||||
if self.imagePixmap.width() > win_w or self.imagePixmap.height() > win_h:
|
||||
# scale the pixmap to fit in the frame
|
||||
display_pixmap = self.imagePixmap.scaled(win_w, win_h, QtCore.Qt.AspectRatioMode.KeepAspectRatio)
|
||||
self.lblImage.setPixmap(display_pixmap)
|
||||
else:
|
||||
display_pixmap = self.imagePixmap
|
||||
self.lblImage.setPixmap(display_pixmap)
|
||||
|
||||
# move and resize the label to be centered in the frame
|
||||
img_w = display_pixmap.width()
|
||||
img_h = display_pixmap.height()
|
||||
self.lblImage.resize(img_w, img_h)
|
||||
self.lblImage.move(int((win_w - img_w) / 2), int((win_h - img_h) / 2))
|
||||
|
||||
def mousePressEvent(self, event: QtGui.QMouseEvent) -> None:
|
||||
self.close()
|
||||
692
comictaggerlib/issueidentifier.py
Normal file
@@ -0,0 +1,692 @@
|
||||
"""A class to automatically identify a comic archive"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import io
|
||||
import logging
|
||||
import sys
|
||||
from typing import Any, Callable, List, Optional
|
||||
|
||||
from typing_extensions import NotRequired, TypedDict
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comicapi.issuestring import IssueString
|
||||
from comictaggerlib.comicvinetalker import ComicVineTalker, ComicVineTalkerException
|
||||
from comictaggerlib.imagefetcher import ImageFetcher, ImageFetcherException
|
||||
from comictaggerlib.imagehasher import ImageHasher
|
||||
from comictaggerlib.resulttypes import IssueResult
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from PIL import Image
|
||||
|
||||
pil_available = True
|
||||
except ImportError:
|
||||
pil_available = False
|
||||
|
||||
|
||||
class SearchKeys(TypedDict):
|
||||
series: Optional[str]
|
||||
issue_number: Optional[str]
|
||||
month: Optional[int]
|
||||
year: Optional[int]
|
||||
issue_count: Optional[int]
|
||||
|
||||
|
||||
class Score(TypedDict):
|
||||
score: NotRequired[int]
|
||||
url: str
|
||||
hash: int
|
||||
|
||||
|
||||
class IssueIdentifierNetworkError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class IssueIdentifierCancelled(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class IssueIdentifier:
|
||||
result_no_matches = 0
|
||||
result_found_match_but_bad_cover_score = 1
|
||||
result_found_match_but_not_first_page = 2
|
||||
result_multiple_matches_with_bad_image_scores = 3
|
||||
result_one_good_match = 4
|
||||
result_multiple_good_matches = 5
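# search() stores one of the result codes above in self.search_result to describe its outcome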
|
||||
|
||||
def __init__(self, comic_archive: ComicArchive, settings: ComicTaggerSettings) -> None:
|
||||
self.settings = settings
|
||||
self.comic_archive: ComicArchive = comic_archive
|
||||
self.image_hasher = 1
|
||||
|
||||
self.only_use_additional_meta_data = False
|
||||
|
||||
# a decent hamming score, good enough to call it a match
|
||||
self.min_score_thresh: int = 16
|
||||
|
||||
# for alternate covers, be more stringent, since we're a bit more
|
||||
# scattershot in comparisons
|
||||
self.min_alternate_score_thresh = 12
|
||||
|
||||
# the min distance a hamming score must be to separate itself from
|
||||
# closest neighbor
|
||||
self.min_score_distance = 4
|
||||
|
||||
# a very strong hamming score, almost certainly the same image
|
||||
self.strong_score_thresh = 8
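# all of these values are hamming distances between image hashes; a lower score means a closer match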
|
||||
|
||||
# used to eliminate series names that are too long based on our search
|
||||
# string
|
||||
self.length_delta_thresh = settings.id_length_delta_thresh
|
||||
|
||||
# used to eliminate unlikely publishers
|
||||
self.publisher_filter = [s.strip().lower() for s in settings.id_publisher_filter.split(",")]
|
||||
|
||||
self.additional_metadata = GenericMetadata()
|
||||
self.output_function: Callable[[str], None] = IssueIdentifier.default_write_output
|
||||
self.callback: Optional[Callable[[int, int], None]] = None
|
||||
self.cover_url_callback: Optional[Callable[[bytes], None]] = None
|
||||
self.search_result = self.result_no_matches
|
||||
self.cover_page_index = 0
|
||||
self.cancel = False
|
||||
self.wait_and_retry_on_rate_limit = False
|
||||
|
||||
self.match_list: list[IssueResult] = []
|
||||
|
||||
def set_score_min_threshold(self, thresh: int) -> None:
|
||||
self.min_score_thresh = thresh
|
||||
|
||||
def set_score_min_distance(self, distance: int) -> None:
|
||||
self.min_score_distance = distance
|
||||
|
||||
def set_additional_metadata(self, md: GenericMetadata) -> None:
|
||||
self.additional_metadata = md
|
||||
|
||||
def set_name_length_delta_threshold(self, delta: int) -> None:
|
||||
self.length_delta_thresh = delta
|
||||
|
||||
def set_publisher_filter(self, flt: List[str]) -> None:
|
||||
self.publisher_filter = flt
|
||||
|
||||
def set_hasher_algorithm(self, algo: int) -> None:
|
||||
self.image_hasher = algo
|
||||
|
||||
def set_output_function(self, func: Callable[[str], None]) -> None:
|
||||
self.output_function = func
|
||||
|
||||
def calculate_hash(self, image_data: bytes) -> int:
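# image_hasher selects the algorithm; only the average hash (the default, 1) is currently enabled, the other hashers are stubbed out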
|
||||
if self.image_hasher == 3:
|
||||
return -1 # ImageHasher(data=image_data).dct_average_hash()
|
||||
if self.image_hasher == 2:
|
||||
return -1 # ImageHasher(data=image_data).average_hash2()
|
||||
|
||||
return ImageHasher(data=image_data).average_hash()
|
||||
|
||||
def get_aspect_ratio(self, image_data: bytes) -> float:
|
||||
try:
|
||||
im = Image.open(io.BytesIO(image_data))
|
||||
w, h = im.size
|
||||
return float(h) / float(w)
|
||||
except Exception:
|
||||
return 1.5
|
||||
|
||||
def crop_cover(self, image_data: bytes) -> bytes:
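# Returns the right half of the image, re-encoded as PNG (for a two-page spread scan this is likely the front cover)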
|
||||
|
||||
im = Image.open(io.BytesIO(image_data))
|
||||
w, h = im.size
|
||||
|
||||
try:
|
||||
cropped_im = im.crop((int(w / 2), 0, w, h))
|
||||
except Exception:
|
||||
logger.exception("cropCover() error")
|
||||
return bytes()
|
||||
|
||||
output = io.BytesIO()
|
||||
cropped_im.save(output, format="PNG")
|
||||
cropped_image_data = output.getvalue()
|
||||
output.close()
|
||||
|
||||
return cropped_image_data
|
||||
|
||||
def set_progress_callback(self, cb_func: Callable[[int, int], None]) -> None:
|
||||
self.callback = cb_func
|
||||
|
||||
def set_cover_url_callback(self, cb_func: Callable[[bytes], None]) -> None:
|
||||
self.cover_url_callback = cb_func
|
||||
|
||||
def get_search_keys(self) -> SearchKeys:
|
||||
|
||||
ca = self.comic_archive
|
||||
search_keys: SearchKeys = {
|
||||
"series": None,
|
||||
"issue_number": None,
|
||||
"month": None,
|
||||
"year": None,
|
||||
"issue_count": None,
|
||||
}
|
||||
|
||||
if ca is None:
|
||||
return None
|
||||
|
||||
if self.only_use_additional_meta_data:
|
||||
search_keys["series"] = self.additional_metadata.series
|
||||
search_keys["issue_number"] = self.additional_metadata.issue
|
||||
search_keys["year"] = self.additional_metadata.year
|
||||
search_keys["month"] = self.additional_metadata.month
|
||||
search_keys["issue_count"] = self.additional_metadata.issue_count
|
||||
return search_keys
|
||||
|
||||
# see if the archive has any useful meta data for searching with
|
||||
if ca.has_cix():
|
||||
internal_metadata = ca.read_cix()
|
||||
elif ca.has_cbi():
|
||||
internal_metadata = ca.read_cbi()
|
||||
else:
|
||||
internal_metadata = ca.read_cbi()
|
||||
|
||||
# try to get some metadata from filename
|
||||
md_from_filename = ca.metadata_from_filename(
|
||||
self.settings.complicated_parser,
|
||||
self.settings.remove_c2c,
|
||||
self.settings.remove_fcbd,
|
||||
self.settings.remove_publisher,
|
||||
)
|
||||
|
||||
# preference order:
|
||||
# 1. Additional metadata
|
||||
# 2. Internal metadata
|
||||
# 3. Filename metadata
|
||||
|
||||
if self.additional_metadata.series is not None:
|
||||
search_keys["series"] = self.additional_metadata.series
|
||||
elif internal_metadata.series is not None:
|
||||
search_keys["series"] = internal_metadata.series
|
||||
else:
|
||||
search_keys["series"] = md_from_filename.series
|
||||
|
||||
if self.additional_metadata.issue is not None:
|
||||
search_keys["issue_number"] = self.additional_metadata.issue
|
||||
elif internal_metadata.issue is not None:
|
||||
search_keys["issue_number"] = internal_metadata.issue
|
||||
else:
|
||||
search_keys["issue_number"] = md_from_filename.issue
|
||||
|
||||
if self.additional_metadata.year is not None:
|
||||
search_keys["year"] = self.additional_metadata.year
|
||||
elif internal_metadata.year is not None:
|
||||
search_keys["year"] = internal_metadata.year
|
||||
else:
|
||||
search_keys["year"] = md_from_filename.year
|
||||
|
||||
if self.additional_metadata.month is not None:
|
||||
search_keys["month"] = self.additional_metadata.month
|
||||
elif internal_metadata.month is not None:
|
||||
search_keys["month"] = internal_metadata.month
|
||||
else:
|
||||
search_keys["month"] = md_from_filename.month
|
||||
|
||||
if self.additional_metadata.issue_count is not None:
|
||||
search_keys["issue_count"] = self.additional_metadata.issue_count
|
||||
elif internal_metadata.issue_count is not None:
|
||||
search_keys["issue_count"] = internal_metadata.issue_count
|
||||
else:
|
||||
search_keys["issue_count"] = md_from_filename.issue_count
|
||||
|
||||
return search_keys
|
||||
|
||||
@staticmethod
|
||||
def default_write_output(text: str) -> None:
|
||||
sys.stdout.write(text)
|
||||
sys.stdout.flush()
|
||||
|
||||
def log_msg(self, msg: Any, newline: bool = True) -> None:
|
||||
msg = str(msg)
|
||||
if newline:
|
||||
msg += "\n"
|
||||
self.output_function(msg)
|
||||
|
||||
def get_issue_cover_match_score(
|
||||
self,
|
||||
comic_vine: ComicVineTalker,
|
||||
issue_id: int,
|
||||
primary_img_url: str,
|
||||
primary_thumb_url: str,
|
||||
page_url: str,
|
||||
local_cover_hash_list: list[int],
|
||||
use_remote_alternates: bool = False,
|
||||
use_log: bool = True,
|
||||
) -> Score:
|
||||
# local_cover_hash_list is a list of pre-calculated hashes.
|
||||
# use_remote_alternates - indicates to use alternate covers from CV
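# Returns the Score with the lowest hamming distance found across every (local hash, remote cover) pair.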
|
||||
|
||||
try:
|
||||
url_image_data = ImageFetcher().fetch(primary_thumb_url, blocking=True)
|
||||
except ImageFetcherException as e:
|
||||
self.log_msg("Network issue while fetching cover image from Comic Vine. Aborting...")
|
||||
raise IssueIdentifierNetworkError from e
|
||||
|
||||
if self.cancel:
|
||||
raise IssueIdentifierCancelled
|
||||
|
||||
# alert the GUI, if needed
|
||||
if self.cover_url_callback is not None:
|
||||
self.cover_url_callback(url_image_data)
|
||||
|
||||
remote_cover_list = []
|
||||
|
||||
remote_cover_list.append(Score({"url": primary_img_url, "hash": self.calculate_hash(url_image_data)}))
|
||||
|
||||
if self.cancel:
|
||||
raise IssueIdentifierCancelled
|
||||
|
||||
if use_remote_alternates:
|
||||
alt_img_url_list = comic_vine.fetch_alternate_cover_urls(issue_id, page_url)
|
||||
for alt_url in alt_img_url_list:
|
||||
try:
|
||||
alt_url_image_data = ImageFetcher().fetch(alt_url, blocking=True)
|
||||
except ImageFetcherException as e:
|
||||
self.log_msg("Network issue while fetching alt. cover image from Comic Vine. Aborting...")
|
||||
raise IssueIdentifierNetworkError from e
|
||||
|
||||
if self.cancel:
|
||||
raise IssueIdentifierCancelled
|
||||
|
||||
# alert the GUI, if needed
|
||||
if self.cover_url_callback is not None:
|
||||
self.cover_url_callback(alt_url_image_data)
|
||||
|
||||
remote_cover_list.append(Score({"url": alt_url, "hash": self.calculate_hash(alt_url_image_data)}))
|
||||
|
||||
if self.cancel:
|
||||
raise IssueIdentifierCancelled
|
||||
|
||||
if use_log and use_remote_alternates:
|
||||
self.log_msg(f"[{len(remote_cover_list) - 1} alt. covers]", False)
|
||||
if use_log:
|
||||
self.log_msg("[ ", False)
|
||||
|
||||
score_list = []
|
||||
done = False
|
||||
for local_cover_hash in local_cover_hash_list:
|
||||
for remote_cover_item in remote_cover_list:
|
||||
score = ImageHasher.hamming_distance(local_cover_hash, remote_cover_item["hash"])
|
||||
score_list.append(
|
||||
Score({"score": score, "url": remote_cover_item["url"], "hash": remote_cover_item["hash"]})
|
||||
)
|
||||
if use_log:
|
||||
self.log_msg(score, False)
|
||||
|
||||
if score <= self.strong_score_thresh:
|
||||
# such a good score, we can quit now, since for sure we
|
||||
# have a winner
|
||||
done = True
|
||||
break
|
||||
if done:
|
||||
break
|
||||
|
||||
if use_log:
|
||||
self.log_msg(" ]", False)
|
||||
|
||||
best_score_item = min(score_list, key=lambda x: x["score"])
|
||||
|
||||
return best_score_item
|
||||
|
||||
def search(self) -> List[IssueResult]:
|
||||
ca = self.comic_archive
|
||||
self.match_list = []
|
||||
self.cancel = False
|
||||
self.search_result = self.result_no_matches
|
||||
|
||||
if not pil_available:
|
||||
self.log_msg("Python Imaging Library (PIL) is not available and is needed for issue identification.")
|
||||
return self.match_list
|
||||
|
||||
if not ca.seems_to_be_a_comic_archive():
|
||||
self.log_msg(f"Sorry, but {ca.path} is not a comic archive!")
|
||||
return self.match_list
|
||||
|
||||
cover_image_data = ca.get_page(self.cover_page_index)
|
||||
cover_hash = self.calculate_hash(cover_image_data)
|
||||
|
||||
# check the aspect ratio
|
||||
# if it's wider than it is high, it's probably a two page spread
|
||||
# if so, crop it and calculate a second hash
|
||||
narrow_cover_hash = None
|
||||
aspect_ratio = self.get_aspect_ratio(cover_image_data)
|
||||
if aspect_ratio < 1.0:
|
||||
right_side_image_data = self.crop_cover(cover_image_data)
|
||||
if right_side_image_data is not None:
|
||||
narrow_cover_hash = self.calculate_hash(right_side_image_data)
|
||||
|
||||
keys = self.get_search_keys()
|
||||
# normalize the issue number
|
||||
keys["issue_number"] = IssueString(keys["issue_number"]).as_string()
|
||||
|
||||
# we need, at minimum, a series and issue number
|
||||
if keys["series"] is None or keys["issue_number"] is None:
|
||||
self.log_msg("Not enough info for a search!")
|
||||
return []
|
||||
|
||||
self.log_msg("Going to search for:")
|
||||
self.log_msg("\tSeries: " + keys["series"])
|
||||
self.log_msg("\tIssue: " + keys["issue_number"])
|
||||
if keys["issue_count"] is not None:
|
||||
self.log_msg("\tCount: " + str(keys["issue_count"]))
|
||||
if keys["year"] is not None:
|
||||
self.log_msg("\tYear: " + str(keys["year"]))
|
||||
if keys["month"] is not None:
|
||||
self.log_msg("\tMonth: " + str(keys["month"]))
|
||||
|
||||
comic_vine = ComicVineTalker()
|
||||
comic_vine.wait_for_rate_limit = self.wait_and_retry_on_rate_limit
|
||||
|
||||
comic_vine.set_log_func(self.output_function)
|
||||
|
||||
self.log_msg(f"Searching for {keys['series']} #{keys['issue_number']} ...")
|
||||
try:
|
||||
cv_search_results = comic_vine.search_for_series(keys["series"])
|
||||
except ComicVineTalkerException:
|
||||
self.log_msg("Network issue while searching for series. Aborting...")
|
||||
return []
|
||||
|
||||
if self.cancel:
|
||||
return []
|
||||
|
||||
if cv_search_results is None:
|
||||
return []
|
||||
|
||||
series_second_round_list = []
|
||||
|
||||
for item in cv_search_results:
|
||||
length_approved = False
|
||||
publisher_approved = True
|
||||
date_approved = True
|
||||
|
||||
# remove any series that starts after the issue year
|
||||
if (
|
||||
keys["year"] is not None
|
||||
and str(keys["year"]).isdigit()
|
||||
and item["start_year"] is not None
|
||||
and str(item["start_year"]).isdigit()
|
||||
):
|
||||
if int(keys["year"]) < int(item["start_year"]):
|
||||
date_approved = False
|
||||
|
||||
# assume that our search name is close to the actual name, say
|
||||
# within, e.g., 5 chars
|
||||
# sanitize both the search string and the result so that
|
||||
# we are comparing the same type of data
|
||||
shortened_key = utils.sanitize_title(keys["series"])
|
||||
shortened_item_name = utils.sanitize_title(item["name"])
|
||||
if len(shortened_item_name) < (len(shortened_key) + self.length_delta_thresh):
|
||||
length_approved = True
|
||||
|
||||
# remove any series from publishers on the filter
|
||||
if item["publisher"] is not None:
|
||||
publisher = item["publisher"]["name"]
|
||||
if publisher is not None and publisher.lower() in self.publisher_filter:
|
||||
publisher_approved = False
|
||||
|
||||
if length_approved and publisher_approved and date_approved:
|
||||
series_second_round_list.append(item)
|
||||
|
||||
self.log_msg("Searching in " + str(len(series_second_round_list)) + " series")
|
||||
|
||||
if self.callback is not None:
|
||||
self.callback(0, len(series_second_round_list))
|
||||
|
||||
# now sort the list by name length
|
||||
series_second_round_list.sort(key=lambda x: len(x["name"]), reverse=False)
|
||||
|
||||
# build a list of volume IDs
|
||||
volume_id_list = []
|
||||
for series in series_second_round_list:
|
||||
volume_id_list.append(series["id"])
|
||||
|
||||
issue_list = None
|
||||
try:
|
||||
if len(volume_id_list) > 0:
|
||||
issue_list = comic_vine.fetch_issues_by_volume_issue_num_and_year(
|
||||
volume_id_list, keys["issue_number"], keys["year"]
|
||||
)
|
||||
|
||||
except ComicVineTalkerException:
|
||||
self.log_msg("Network issue while searching for series details. Aborting...")
|
||||
return []
|
||||
|
||||
if issue_list is None:
|
||||
return []
|
||||
|
||||
shortlist = []
|
||||
# now re-associate the issues and volumes
|
||||
for issue in issue_list:
|
||||
for series in series_second_round_list:
|
||||
if series["id"] == issue["volume"]["id"]:
|
||||
shortlist.append((series, issue))
|
||||
break
|
||||
|
||||
if keys["year"] is None:
|
||||
self.log_msg(f"Found {len(shortlist)} series that have an issue #{keys['issue_number']}")
|
||||
else:
|
||||
self.log_msg(
|
||||
f"Found {len(shortlist)} series that have an issue #{keys['issue_number']} from {keys['year']}"
|
||||
)
|
||||
|
||||
# now we have a shortlist of volumes with the desired issue number
|
||||
# Do first round of cover matching
|
||||
counter = len(shortlist)
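# progress is reported against a total of 3x the shortlist size: roughly one third each for the
# search above, this cover pass, and the optional alternate-cover pass below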
|
||||
for series, issue in shortlist:
|
||||
if self.callback is not None:
|
||||
self.callback(counter, len(shortlist) * 3)
|
||||
counter += 1
|
||||
|
||||
self.log_msg(
|
||||
f"Examining covers for ID: {series['id']} {series['name']} ({series['start_year']}) ...",
|
||||
newline=False,
|
||||
)
|
||||
|
||||
# parse out the cover date
|
||||
_, month, year = comic_vine.parse_date_str(issue["cover_date"])
|
||||
|
||||
# Now check the cover match against the primary image
|
||||
hash_list = [cover_hash]
|
||||
if narrow_cover_hash is not None:
|
||||
hash_list.append(narrow_cover_hash)
|
||||
|
||||
try:
|
||||
image_url = issue["image"]["super_url"]
|
||||
thumb_url = issue["image"]["thumb_url"]
|
||||
page_url = issue["site_detail_url"]
|
||||
|
||||
score_item = self.get_issue_cover_match_score(
|
||||
comic_vine,
|
||||
issue["id"],
|
||||
image_url,
|
||||
thumb_url,
|
||||
page_url,
|
||||
hash_list,
|
||||
use_remote_alternates=False,
|
||||
)
|
||||
except Exception:
|
||||
self.match_list = []
|
||||
return self.match_list
|
||||
|
||||
match: IssueResult = {
|
||||
"series": f"{series['name']} ({series['start_year']})",
|
||||
"distance": score_item["score"],
|
||||
"issue_number": keys["issue_number"],
|
||||
"cv_issue_count": series["count_of_issues"],
|
||||
"url_image_hash": score_item["hash"],
|
||||
"issue_title": issue["name"],
|
||||
"issue_id": issue["id"],
|
||||
"volume_id": series["id"],
|
||||
"month": month,
|
||||
"year": year,
|
||||
"publisher": None,
|
||||
"image_url": image_url,
|
||||
"thumb_url": thumb_url,
|
||||
"page_url": page_url,
|
||||
"description": issue["description"],
|
||||
}
|
||||
if series["publisher"] is not None:
|
||||
match["publisher"] = series["publisher"]["name"]
|
||||
|
||||
self.match_list.append(match)
|
||||
|
||||
self.log_msg(f" --> {match['distance']}", newline=False)
|
||||
|
||||
self.log_msg("")
|
||||
|
||||
if len(self.match_list) == 0:
|
||||
self.log_msg(":-(no matches!")
|
||||
self.search_result = self.result_no_matches
|
||||
return self.match_list
|
||||
|
||||
# sort list by image match scores
|
||||
self.match_list.sort(key=lambda k: k["distance"])
|
||||
|
||||
lst = []
|
||||
for i in self.match_list:
|
||||
lst.append(i["distance"])
|
||||
|
||||
self.log_msg(f"Compared to covers in {len(self.match_list)} issue(s):", newline=False)
|
||||
self.log_msg(str(lst))
|
||||
|
||||
def print_match(item: IssueResult) -> None:
|
||||
self.log_msg(
|
||||
"-----> {} #{} {} ({}/{}) -- score: {}".format(
|
||||
item["series"],
|
||||
item["issue_number"],
|
||||
item["issue_title"],
|
||||
item["month"],
|
||||
item["year"],
|
||||
item["distance"],
|
||||
)
|
||||
)
|
||||
|
||||
best_score: int = self.match_list[0]["distance"]
|
||||
|
||||
if best_score >= self.min_score_thresh:
|
||||
# we have 1 or more low-confidence matches (all bad cover scores)
|
||||
# look at a few more pages in the archive, and also alternate covers online
|
||||
self.log_msg("Very weak scores for the cover. Analyzing alternate pages and covers...")
|
||||
hash_list = [cover_hash]
|
||||
if narrow_cover_hash is not None:
|
||||
hash_list.append(narrow_cover_hash)
|
||||
for page_index in range(1, min(3, ca.get_number_of_pages())):
|
||||
image_data = ca.get_page(page_index)
|
||||
page_hash = self.calculate_hash(image_data)
|
||||
hash_list.append(page_hash)
|
||||
|
||||
second_match_list = []
|
||||
counter = 2 * len(self.match_list)
|
||||
for m in self.match_list:
|
||||
if self.callback is not None:
|
||||
self.callback(counter, len(self.match_list) * 3)
|
||||
counter += 1
|
||||
self.log_msg(f"Examining alternate covers for ID: {m['volume_id']} {m['series']} ...", newline=False)
|
||||
try:
|
||||
score_item = self.get_issue_cover_match_score(
|
||||
comic_vine,
|
||||
m["issue_id"],
|
||||
m["image_url"],
|
||||
m["thumb_url"],
|
||||
m["page_url"],
|
||||
hash_list,
|
||||
use_remote_alternates=True,
|
||||
)
|
||||
except Exception:
|
||||
self.match_list = []
|
||||
return self.match_list
|
||||
self.log_msg(f"--->{score_item['score']}")
|
||||
self.log_msg("")
|
||||
|
||||
if score_item["score"] < self.min_alternate_score_thresh:
|
||||
second_match_list.append(m)
|
||||
m["distance"] = score_item["score"]
|
||||
|
||||
if len(second_match_list) == 0:
|
||||
if len(self.match_list) == 1:
|
||||
self.log_msg("No matching pages in the issue.")
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
print_match(self.match_list[0])
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
self.search_result = self.result_found_match_but_bad_cover_score
|
||||
else:
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
self.log_msg("Multiple bad cover matches! Need to use other info...")
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
self.search_result = self.result_multiple_matches_with_bad_image_scores
|
||||
return self.match_list
|
||||
|
||||
# We did good, found something!
|
||||
self.log_msg("Success in secondary/alternate cover matching!")
|
||||
|
||||
self.match_list = second_match_list
|
||||
# sort new list by image match scores
|
||||
self.match_list.sort(key=lambda k: k["distance"])
|
||||
best_score = self.match_list[0]["distance"]
|
||||
self.log_msg("[Second round cover matching: best score = {best_score}]")
|
||||
# now drop down into the rest of the processing
|
||||
|
||||
if self.callback is not None:
|
||||
self.callback(99, 100)
|
||||
|
||||
# now pare down list, remove any item more than specified distant from the top scores
|
||||
for match_item in reversed(self.match_list):
|
||||
if match_item["distance"] > best_score + self.min_score_distance:
|
||||
self.match_list.remove(match_item)
|
||||
|
||||
# One more test, for the case of choosing a limited series' first issue vs. a trade with the same cover:
|
||||
# if we have a given issue count > 1 and the volume from CV has count==1, remove it from match list
|
||||
if len(self.match_list) >= 2 and keys["issue_count"] is not None and keys["issue_count"] != 1:
|
||||
new_list = []
|
||||
for match in self.match_list:
|
||||
if match["cv_issue_count"] != 1:
|
||||
new_list.append(match)
|
||||
else:
|
||||
self.log_msg(
|
||||
f"Removing volume {match['series']} [{match['volume_id']}] from consideration (only 1 issue)"
|
||||
)
|
||||
|
||||
if len(new_list) > 0:
|
||||
self.match_list = new_list
|
||||
|
||||
if len(self.match_list) == 1:
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
print_match(self.match_list[0])
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
self.search_result = self.result_one_good_match
|
||||
|
||||
elif len(self.match_list) == 0:
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
self.log_msg("No matches found :(")
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
self.search_result = self.result_no_matches
|
||||
else:
|
||||
# we've got multiple good matches:
|
||||
self.log_msg("More than one likely candidate.")
|
||||
self.search_result = self.result_multiple_good_matches
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
for match_item in self.match_list:
|
||||
print_match(match_item)
|
||||
self.log_msg("--------------------------------------------------------------------------")
|
||||
|
||||
return self.match_list
185
comictaggerlib/issueselectionwindow.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""A PyQT4 dialog to select specific issue from list"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comicapi.issuestring import IssueString
|
||||
from comictaggerlib.comicvinetalker import ComicVineTalker, ComicVineTalkerException
|
||||
from comictaggerlib.coverimagewidget import CoverImageWidget
|
||||
from comictaggerlib.resulttypes import CVIssuesResults
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IssueNumberTableWidgetItem(QtWidgets.QTableWidgetItem):
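# Table item that sorts issue numbers numerically (e.g. "2" before "10") instead of as plain strings.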
|
||||
def __lt__(self, other: object) -> bool:
|
||||
assert isinstance(other, QtWidgets.QTableWidgetItem)
|
||||
self_str: str = self.data(QtCore.Qt.ItemDataRole.DisplayRole)
|
||||
other_str: str = other.data(QtCore.Qt.ItemDataRole.DisplayRole)
|
||||
return (IssueString(self_str).as_float() or 0) < (IssueString(other_str).as_float() or 0)
|
||||
|
||||
|
||||
class IssueSelectionWindow(QtWidgets.QDialog):
|
||||
volume_id = 0
|
||||
|
||||
def __init__(
|
||||
self, parent: QtWidgets.QWidget, settings: ComicTaggerSettings, series_id: int, issue_number: str
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("issueselectionwindow.ui"), self)
|
||||
|
||||
self.coverWidget = CoverImageWidget(self.coverImageContainer, CoverImageWidget.AltCoverMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.coverImageContainer)
|
||||
gridlayout.addWidget(self.coverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
reduce_widget_font_size(self.twList)
|
||||
reduce_widget_font_size(self.teDescription, 1)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
self.series_id = series_id
|
||||
self.issue_id: Optional[int] = None
|
||||
self.settings = settings
|
||||
self.url_fetch_thread = None
|
||||
self.issue_list: list[CVIssuesResults] = []
|
||||
|
||||
if issue_number is None or issue_number == "":
|
||||
self.issue_number = "1"
|
||||
else:
|
||||
self.issue_number = issue_number
|
||||
|
||||
self.initial_id: Optional[int] = None
|
||||
self.perform_query()
|
||||
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.currentItemChanged.connect(self.current_item_changed)
|
||||
self.twList.cellDoubleClicked.connect(self.cell_double_clicked)
|
||||
|
||||
# now that the list has been sorted, find the initial record, and
|
||||
# select it
|
||||
if self.initial_id is None:
|
||||
self.twList.selectRow(0)
|
||||
else:
|
||||
for r in range(0, self.twList.rowCount()):
|
||||
issue_id = self.twList.item(r, 0).data(QtCore.Qt.ItemDataRole.UserRole)
|
||||
if issue_id == self.initial_id:
|
||||
self.twList.selectRow(r)
|
||||
break
|
||||
|
||||
def perform_query(self) -> None:
|
||||
|
||||
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
|
||||
|
||||
try:
|
||||
comic_vine = ComicVineTalker()
|
||||
comic_vine.fetch_volume_data(self.series_id)
|
||||
self.issue_list = comic_vine.fetch_issues_by_volume(self.series_id)
|
||||
except ComicVineTalkerException as e:
|
||||
QtWidgets.QApplication.restoreOverrideCursor()
|
||||
if e.code == ComicVineTalkerException.RateLimit:
|
||||
QtWidgets.QMessageBox.critical(self, "Comic Vine Error", ComicVineTalker.get_rate_limit_message())
|
||||
else:
|
||||
QtWidgets.QMessageBox.critical(self, "Network Issue", "Could not connect to Comic Vine to list issues!")
|
||||
return
|
||||
|
||||
while self.twList.rowCount() > 0:
|
||||
self.twList.removeRow(0)
|
||||
|
||||
self.twList.setSortingEnabled(False)
|
||||
|
||||
row = 0
|
||||
for record in self.issue_list:
|
||||
self.twList.insertRow(row)
|
||||
|
||||
item_text = record["issue_number"]
|
||||
item = IssueNumberTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.UserRole, record["id"])
|
||||
item.setData(QtCore.Qt.ItemDataRole.DisplayRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 0, item)
|
||||
|
||||
item_text = record["cover_date"]
|
||||
if item_text is None:
|
||||
item_text = ""
|
||||
# remove the day of "YYYY-MM-DD"
|
||||
parts = item_text.split("-")
|
||||
if len(parts) > 1:
|
||||
item_text = parts[0] + "-" + parts[1]
|
||||
|
||||
QTW_item = QtWidgets.QTableWidgetItem(item_text)
|
||||
QTW_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
QTW_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 1, QTW_item)
|
||||
|
||||
item_text = record["name"]
|
||||
if item_text is None:
|
||||
item_text = ""
|
||||
QTW_item = QtWidgets.QTableWidgetItem(item_text)
|
||||
QTW_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
QTW_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 2, QTW_item)
|
||||
|
||||
if (
|
||||
IssueString(record["issue_number"]).as_string().lower()
|
||||
== IssueString(self.issue_number).as_string().lower()
|
||||
):
|
||||
self.initial_id = record["id"]
|
||||
|
||||
row += 1
|
||||
|
||||
self.twList.setSortingEnabled(True)
|
||||
self.twList.sortItems(0, QtCore.Qt.SortOrder.AscendingOrder)
|
||||
|
||||
QtWidgets.QApplication.restoreOverrideCursor()
|
||||
|
||||
def cell_double_clicked(self, r: int, c: int) -> None:
|
||||
self.accept()
|
||||
|
||||
def current_item_changed(self, curr: Optional[QtCore.QModelIndex], prev: Optional[QtCore.QModelIndex]) -> None:
|
||||
|
||||
if curr is None:
|
||||
return
|
||||
if prev is not None and prev.row() == curr.row():
|
||||
return
|
||||
|
||||
self.issue_id = self.twList.item(curr.row(), 0).data(QtCore.Qt.ItemDataRole.UserRole)
|
||||
|
||||
# list selection was changed, update the issue cover
|
||||
for record in self.issue_list:
|
||||
if record["id"] == self.issue_id:
|
||||
self.issue_number = record["issue_number"]
|
||||
self.coverWidget.set_issue_id(self.issue_id)
|
||||
if record["description"] is None:
|
||||
self.teDescription.setText("")
|
||||
else:
|
||||
self.teDescription.setText(record["description"])
|
||||
|
||||
break
53
comictaggerlib/logwindow.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""A PyQT4 dialog to a text file or log"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
from typing import Union
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui import qtutils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LogWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("logwindow.ui"), self)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
def set_text(self, text: Union[str, bytes, None]) -> None:
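# Accepts str or bytes; bytes are decoded as UTF-8 before being displayed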
|
||||
try:
|
||||
if text is not None:
|
||||
if isinstance(text, bytes):
|
||||
text = text.decode("utf-8")
|
||||
self.textEdit.setPlainText(text)
|
||||
except AttributeError:
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.exception("Displaying raw tags failed")
|
||||
qtutils.qt_error("Displaying raw tags failed:", e)
217
comictaggerlib/main.py
Executable file
@@ -0,0 +1,217 @@
|
||||
"""A python app to (automatically) tag comic archives"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import signal
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
from typing import Optional
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from comicapi import utils
|
||||
from comictaggerlib import cli
|
||||
from comictaggerlib.comicvinetalker import ComicVineTalker
|
||||
from comictaggerlib.ctversion import version
|
||||
from comictaggerlib.options import parse_cmd_line
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger("comictagger")
|
||||
logging.getLogger("comicapi").setLevel(logging.DEBUG)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
try:
|
||||
qt_available = True
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets
|
||||
|
||||
def show_exception_box(log_msg: str) -> None:
|
||||
"""Checks if a QApplication instance is available and shows a messagebox with the exception message.
|
||||
If no QApplication instance is available, just log a notice instead.
|
||||
"""
|
||||
if QtWidgets.QApplication.instance() is not None:
|
||||
errorbox = QtWidgets.QMessageBox()
|
||||
errorbox.setText(f"Oops. An unexpected error occured:\n{log_msg}")
|
||||
errorbox.exec()
|
||||
QtWidgets.QApplication.exit(1)
|
||||
else:
|
||||
logger.debug("No QApplication instance available.")
|
||||
|
||||
class UncaughtHook(QtCore.QObject):
|
||||
_exception_caught = QtCore.pyqtSignal(object)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
# this registers the exception_hook() function as hook with the Python interpreter
|
||||
sys.excepthook = self.exception_hook
|
||||
|
||||
# connect signal to execute the message box function always on main thread
|
||||
self._exception_caught.connect(show_exception_box)
|
||||
|
||||
def exception_hook(
|
||||
self, exc_type: type[BaseException], exc_value: BaseException, exc_traceback: Optional[types.TracebackType]
|
||||
) -> None:
|
||||
"""Function handling uncaught exceptions.
|
||||
It is triggered each time an uncaught exception occurs.
|
||||
"""
|
||||
if issubclass(exc_type, KeyboardInterrupt):
|
||||
# ignore keyboard interrupt to support console applications
|
||||
sys.__excepthook__(exc_type, exc_value, exc_traceback)
|
||||
else:
|
||||
exc_info = (exc_type, exc_value, exc_traceback)
|
||||
log_msg = "\n".join(["".join(traceback.format_tb(exc_traceback)), f"{exc_type.__name__}: {exc_value}"])
|
||||
logger.critical("Uncaught exception: %s: %s", exc_type.__name__, exc_value, exc_info=exc_info)
|
||||
|
||||
# trigger message box show
|
||||
self._exception_caught.emit(log_msg)
|
||||
|
||||
qt_exception_hook = UncaughtHook()
|
||||
from comictaggerlib.taggerwindow import TaggerWindow
|
||||
except ImportError as e:
|
||||
|
||||
def show_exception_box(log_msg: str) -> None:
|
||||
pass
|
||||
|
||||
logger.error(str(e))
|
||||
qt_available = False
|
||||
|
||||
|
||||
def rotate(handler: logging.handlers.RotatingFileHandler, filename: pathlib.Path) -> None:
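# Roll the log over at startup if the previous file has content, so each run writes to a fresh log file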
|
||||
if filename.is_file() and filename.stat().st_size > 0:
|
||||
handler.doRollover()
|
||||
|
||||
|
||||
def update_publishers() -> None:
|
||||
json_file = ComicTaggerSettings.get_settings_folder() / "publishers.json"
|
||||
if json_file.exists():
|
||||
try:
|
||||
utils.update_publishers(json.loads(json_file.read_text("utf-8")))
|
||||
except Exception as e:
|
||||
logger.exception("Failed to load publishers from %s", json_file)
|
||||
show_exception_box(str(e))
|
||||
|
||||
|
||||
def ctmain() -> None:
|
||||
opts = parse_cmd_line()
|
||||
SETTINGS = ComicTaggerSettings(opts.config_path)
|
||||
|
||||
os.makedirs(ComicTaggerSettings.get_settings_folder() / "logs", exist_ok=True)
|
||||
stream_handler = logging.StreamHandler()
|
||||
stream_handler.setLevel(logging.WARNING)
|
||||
file_handler = logging.handlers.RotatingFileHandler(
|
||||
ComicTaggerSettings.get_settings_folder() / "logs" / "ComicTagger.log", encoding="utf-8", backupCount=10
|
||||
)
|
||||
rotate(file_handler, ComicTaggerSettings.get_settings_folder() / "logs" / "ComicTagger.log")
|
||||
logging.basicConfig(
|
||||
handlers=[
|
||||
stream_handler,
|
||||
file_handler,
|
||||
],
|
||||
level=logging.WARNING,
|
||||
format="%(asctime)s | %(name)s | %(levelname)s | %(message)s",
|
||||
datefmt="%Y-%m-%dT%H:%M:%S",
|
||||
)
|
||||
# Need to load setting before anything else
|
||||
|
||||
# manage the CV API key
|
||||
if opts.cv_api_key:
|
||||
if opts.cv_api_key != SETTINGS.cv_api_key:
|
||||
SETTINGS.cv_api_key = opts.cv_api_key
|
||||
SETTINGS.save()
|
||||
if opts.only_set_cv_key:
|
||||
print("Key set")
|
||||
return
|
||||
|
||||
ComicVineTalker.api_key = SETTINGS.cv_api_key
|
||||
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
|
||||
logger.info(
|
||||
"ComicTagger Version: %s running on: %s PyInstaller: %s",
|
||||
version,
|
||||
platform.system(),
|
||||
"Yes" if getattr(sys, "frozen", None) else "No",
|
||||
)
|
||||
|
||||
logger.debug("Installed Packages")
|
||||
for pkg in sorted(pkg_resources.working_set, key=lambda x: x.project_name):
|
||||
logger.debug("%s\t%s", pkg.project_name, pkg.version)
|
||||
|
||||
utils.load_publishers()
|
||||
update_publishers()
|
||||
|
||||
if not qt_available and not opts.no_gui:
|
||||
opts.no_gui = True
|
||||
print("PyQt5 is not available. ComicTagger is limited to command-line mode.")
|
||||
logger.info("PyQt5 is not available. ComicTagger is limited to command-line mode.")
|
||||
|
||||
if opts.no_gui:
|
||||
try:
|
||||
cli.cli_mode(opts, SETTINGS)
|
||||
except Exception:
|
||||
logger.exception("CLI mode failed")
|
||||
else:
|
||||
os.environ["QtWidgets.QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"
|
||||
args = []
|
||||
if opts.darkmode:
|
||||
args.extend(["-platform", "windows:darkmode=2"])
|
||||
args.extend(sys.argv)
|
||||
app = QtWidgets.QApplication(args)
|
||||
if platform.system() == "Darwin":
|
||||
# Set the MacOS dock icon
|
||||
app.setWindowIcon(QtGui.QIcon(ComicTaggerSettings.get_graphic("app.png")))
|
||||
|
||||
if platform.system() == "Windows":
|
||||
# For pure python, tell windows that we're not python,
|
||||
# so we can have our own taskbar icon
|
||||
import ctypes
|
||||
|
||||
myappid = "comictagger" # arbitrary string
|
||||
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
|
||||
# force close of console window
|
||||
swp_hidewindow = 0x0080
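# 0x0080 is the Win32 SWP_HIDEWINDOW flag for SetWindowPos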
|
||||
console_wnd = ctypes.windll.kernel32.GetConsoleWindow()
|
||||
if console_wnd != 0:
|
||||
ctypes.windll.user32.SetWindowPos(console_wnd, None, 0, 0, 0, 0, swp_hidewindow)
|
||||
|
||||
if platform.system() != "Linux":
|
||||
img = QtGui.QPixmap(ComicTaggerSettings.get_graphic("tags.png"))
|
||||
|
||||
splash = QtWidgets.QSplashScreen(img)
|
||||
splash.show()
|
||||
splash.raise_()
|
||||
QtWidgets.QApplication.processEvents()
|
||||
|
||||
try:
|
||||
tagger_window = TaggerWindow(opts.files, SETTINGS, opts=opts)
|
||||
tagger_window.setWindowIcon(QtGui.QIcon(ComicTaggerSettings.get_graphic("app.png")))
|
||||
tagger_window.show()
|
||||
|
||||
if platform.system() != "Linux":
|
||||
splash.finish(tagger_window)
|
||||
|
||||
sys.exit(app.exec())
|
||||
except Exception:
|
||||
logger.exception("GUI mode failed")
|
||||
QtWidgets.QMessageBox.critical(
|
||||
QtWidgets.QMainWindow(), "Error", "Unhandled exception in app:\n" + traceback.format_exc()
|
||||
)
157
comictaggerlib/matchselectionwindow.py
Normal file
@@ -0,0 +1,157 @@
|
||||
"""A PyQT4 dialog to select from automated issue matches"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
from comictaggerlib.coverimagewidget import CoverImageWidget
|
||||
from comictaggerlib.resulttypes import IssueResult
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MatchSelectionWindow(QtWidgets.QDialog):
|
||||
volume_id = 0
|
||||
|
||||
def __init__(self, parent: QtWidgets.QWidget, matches: list[IssueResult], comic_archive: ComicArchive) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("matchselectionwindow.ui"), self)
|
||||
|
||||
self.altCoverWidget = CoverImageWidget(self.altCoverContainer, CoverImageWidget.AltCoverMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.altCoverContainer)
|
||||
gridlayout.addWidget(self.altCoverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
self.archiveCoverWidget = CoverImageWidget(self.archiveCoverContainer, CoverImageWidget.ArchiveMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.archiveCoverContainer)
|
||||
gridlayout.addWidget(self.archiveCoverWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
reduce_widget_font_size(self.twList)
|
||||
reduce_widget_font_size(self.teDescription, 1)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
self.matches: list[IssueResult] = matches
|
||||
self.comic_archive = comic_archive
|
||||
|
||||
self.twList.currentItemChanged.connect(self.current_item_changed)
|
||||
self.twList.cellDoubleClicked.connect(self.cell_double_clicked)
|
||||
|
||||
self.update_data()
|
||||
|
||||
def update_data(self) -> None:
|
||||
|
||||
self.set_cover_image()
|
||||
self.populate_table()
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.selectRow(0)
|
||||
|
||||
path = self.comic_archive.path
|
||||
self.setWindowTitle(f"Select correct match: {os.path.split(path)[1]}")
|
||||
|
||||
def populate_table(self) -> None:
|
||||
|
||||
while self.twList.rowCount() > 0:
|
||||
self.twList.removeRow(0)
|
||||
|
||||
self.twList.setSortingEnabled(False)
|
||||
|
||||
row = 0
|
||||
for match in self.matches:
|
||||
self.twList.insertRow(row)
|
||||
|
||||
item_text = match["series"]
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.UserRole, (match,))
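# the match dict is wrapped in a 1-tuple, presumably so it survives the Qt round trip unchanged;
# current_match() unpacks it with [0]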
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 0, item)
|
||||
|
||||
if match["publisher"] is not None:
|
||||
item_text = str(match["publisher"])
|
||||
else:
|
||||
item_text = "Unknown"
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 1, item)
|
||||
|
||||
month_str = ""
|
||||
year_str = "????"
|
||||
if match["month"] is not None:
|
||||
month_str = f"-{int(match['month']):02d}"
|
||||
if match["year"] is not None:
|
||||
year_str = str(match["year"])
|
||||
|
||||
item_text = year_str + month_str
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 2, item)
|
||||
|
||||
item_text = match["issue_title"]
|
||||
if item_text is None:
|
||||
item_text = ""
|
||||
item = QtWidgets.QTableWidgetItem(item_text)
|
||||
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 3, item)
|
||||
|
||||
row += 1
|
||||
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.setSortingEnabled(True)
|
||||
self.twList.sortItems(2, QtCore.Qt.SortOrder.AscendingOrder)
|
||||
self.twList.selectRow(0)
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.horizontalHeader().setStretchLastSection(True)
|
||||
|
||||
def cell_double_clicked(self, r: int, c: int) -> None:
|
||||
self.accept()
|
||||
|
||||
def current_item_changed(self, curr: QtCore.QModelIndex, prev: QtCore.QModelIndex) -> None:
|
||||
|
||||
if curr is None:
|
||||
return
|
||||
if prev is not None and prev.row() == curr.row():
|
||||
return
|
||||
|
||||
self.altCoverWidget.set_issue_id(self.current_match()["issue_id"])
|
||||
if self.current_match()["description"] is None:
|
||||
self.teDescription.setText("")
|
||||
else:
|
||||
self.teDescription.setText(self.current_match()["description"])
|
||||
|
||||
def set_cover_image(self) -> None:
|
||||
self.archiveCoverWidget.set_archive(self.comic_archive)
|
||||
|
||||
def current_match(self) -> IssueResult:
|
||||
row = self.twList.currentRow()
|
||||
match: IssueResult = self.twList.item(row, 0).data(QtCore.Qt.ItemDataRole.UserRole)[0]
|
||||
return match
114
comictaggerlib/optionalmsgdialog.py
Normal file
@@ -0,0 +1,114 @@
|
||||
"""A PyQt5 dialog to show a message and let the user check a box
|
||||
|
||||
Example usage:
|
||||
|
||||
checked = OptionalMessageDialog.msg(self, "Disclaimer",
|
||||
"This is beta software, and you are using it at your own risk!",
|
||||
)
|
||||
|
||||
said_yes, checked = OptionalMessageDialog.question(self, "QtWidgets.Question",
|
||||
"Are you sure you wish to do this?",
|
||||
)
|
||||
"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Union
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
StyleMessage = 0
|
||||
StyleQuestion = 1
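# StyleMessage shows a single OK button; StyleQuestion shows Yes/No, and question() also reports which was chosen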
|
||||
|
||||
|
||||
class OptionalMessageDialog(QtWidgets.QDialog):
|
||||
def __init__(
|
||||
self, parent: QtWidgets.QWidget, style: int, title: str, msg: str, checked: bool = False, check_text: str = ""
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
self.setWindowTitle(title)
|
||||
self.was_accepted = False
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
|
||||
self.theLabel = QtWidgets.QLabel(msg)
|
||||
self.theLabel.setWordWrap(True)
|
||||
self.theLabel.setTextFormat(QtCore.Qt.TextFormat.RichText)
|
||||
self.theLabel.setOpenExternalLinks(True)
|
||||
self.theLabel.setTextInteractionFlags(
|
||||
QtCore.Qt.TextInteractionFlag.TextSelectableByMouse
|
||||
| QtCore.Qt.TextInteractionFlag.LinksAccessibleByMouse
|
||||
| QtCore.Qt.TextInteractionFlag.LinksAccessibleByKeyboard
|
||||
)
|
||||
|
||||
layout.addWidget(self.theLabel)
|
||||
layout.insertSpacing(-1, 10)
|
||||
|
||||
if not check_text:
|
||||
if style == StyleQuestion:
|
||||
check_text = "Remember this answer"
|
||||
else:
|
||||
check_text = "Don't show this message again"
|
||||
|
||||
self.theCheckBox = QtWidgets.QCheckBox(check_text)
|
||||
|
||||
self.theCheckBox.setChecked(checked)
|
||||
|
||||
layout.addWidget(self.theCheckBox)
|
||||
|
||||
btnbox_style: Union[QtWidgets.QDialogButtonBox.StandardButtons, QtWidgets.QDialogButtonBox.StandardButton]
|
||||
if style == StyleQuestion:
|
||||
btnbox_style = QtWidgets.QDialogButtonBox.StandardButton.Yes | QtWidgets.QDialogButtonBox.StandardButton.No
|
||||
else:
|
||||
btnbox_style = QtWidgets.QDialogButtonBox.StandardButton.Ok
|
||||
|
||||
self.theButtonBox = QtWidgets.QDialogButtonBox(
|
||||
btnbox_style,
|
||||
parent=self,
|
||||
)
|
||||
self.theButtonBox.accepted.connect(self.accept)
|
||||
self.theButtonBox.rejected.connect(self.reject)
|
||||
|
||||
layout.addWidget(self.theButtonBox)
|
||||
|
||||
def accept(self) -> None:
|
||||
self.was_accepted = True
|
||||
QtWidgets.QDialog.accept(self)
|
||||
|
||||
def reject(self) -> None:
|
||||
self.was_accepted = False
|
||||
QtWidgets.QDialog.reject(self)
|
||||
|
||||
@staticmethod
|
||||
def msg(parent: QtWidgets.QWidget, title: str, msg: str, checked: bool = False, check_text: str = "") -> bool:
|
||||
|
||||
d = OptionalMessageDialog(parent, StyleMessage, title, msg, checked=checked, check_text=check_text)
|
||||
|
||||
d.exec()
|
||||
return d.theCheckBox.isChecked()
|
||||
|
||||
@staticmethod
|
||||
def question(
|
||||
parent: QtWidgets.QWidget, title: str, msg: str, checked: bool = False, check_text: str = ""
|
||||
) -> tuple[bool, bool]:
|
||||
|
||||
d = OptionalMessageDialog(parent, StyleQuestion, title, msg, checked=checked, check_text=check_text)
|
||||
|
||||
d.exec()
|
||||
|
||||
return d.was_accepted, d.theCheckBox.isChecked()
394
comictaggerlib/options.py
Normal file
@@ -0,0 +1,394 @@
|
||||
"""CLI options class for ComicTagger app"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.comicarchive import MetaDataStyle
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comictaggerlib import ctversion
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def define_args() -> argparse.ArgumentParser:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="""A utility for reading and writing metadata to comic archives.
|
||||
|
||||
If no options are given, %(prog)s will run in windowed mode.""",
|
||||
epilog="For more help visit the wiki at: https://github.com/comictagger/comictagger/wiki",
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
action="store_true",
|
||||
help="Display version.",
|
||||
)
|
||||
commands = parser.add_mutually_exclusive_group()
|
||||
commands.add_argument(
|
||||
"-p",
|
||||
"--print",
|
||||
action="store_true",
|
||||
help="""Print out tag info from file. Specify type\n(via -t) to get only info of that tag type.\n\n""",
|
||||
)
|
||||
commands.add_argument(
|
||||
"-d",
|
||||
"--delete",
|
||||
action="store_true",
|
||||
help="Deletes the tag block of specified type (via -t).\n",
|
||||
)
|
||||
commands.add_argument(
|
||||
"-c",
|
||||
"--copy",
|
||||
type=metadata_type,
|
||||
metavar="{CR,CBL,COMET}",
|
||||
help="Copy the specified source tag block to\ndestination style specified via -t\n(potentially lossy operation).\n\n",
|
||||
)
|
||||
commands.add_argument(
|
||||
"-s",
|
||||
"--save",
|
||||
action="store_true",
|
||||
help="Save out tags as specified type (via -t).\nMust specify also at least -o, -f, or -m.\n\n",
|
||||
)
|
||||
commands.add_argument(
|
||||
"-r",
|
||||
"--rename",
|
||||
action="store_true",
|
||||
help="Rename the file based on specified tag style.",
|
||||
)
|
||||
commands.add_argument(
|
||||
"-e",
|
||||
"--export-to-zip",
|
||||
action="store_true",
|
||||
help="Export RAR archive to Zip format.",
|
||||
)
|
||||
commands.add_argument(
|
||||
"--only-set-cv-key",
|
||||
action="store_true",
|
||||
help="Only set the Comic Vine API key and quit.\n\n",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-1",
|
||||
"--assume-issue-one",
|
||||
action="store_true",
|
||||
help="""Assume issue number is 1 if not found (relevant for -s).\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--abort-on-conflict",
|
||||
action="store_true",
|
||||
help="""Don't export to zip if intended new filename\nexists (otherwise, creates a new unique filename).\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-a",
|
||||
"--auto-imprint",
|
||||
action="store_true",
|
||||
help="""Enables the auto imprint functionality.\ne.g. if the publisher is set to 'vertigo' it\nwill be updated to 'DC Comics' and the imprint\nproperty will be set to 'Vertigo'.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--config",
|
||||
dest="config_path",
|
||||
help="""Config directory defaults to ~/.ComicTagger\non Linux/Mac and %%APPDATA%% on Windows\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--cv-api-key",
|
||||
help="Use the given Comic Vine API Key (persisted in settings).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--delete-rar",
|
||||
action="store_true",
|
||||
dest="delete_after_zip_export",
|
||||
help="""Delete original RAR archive after successful\nexport to Zip.""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--parse-filename",
|
||||
"--parsefilename",
|
||||
action="store_true",
|
||||
help="""Parse the filename to get some info,\nspecifically series name, issue number,\nvolume, and publication year.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--id",
|
||||
dest="issue_id",
|
||||
type=int,
|
||||
help="""Use the issue ID when searching online.\nOverrides all other metadata.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-t",
|
||||
"--type",
|
||||
metavar="{CR,CBL,COMET}",
|
||||
type=metadata_type,
|
||||
help="""Specify TYPE as either CR, CBL, COMET\n(as either ComicRack, ComicBookLover,\nor CoMet style tags, respectively).\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-o",
|
||||
"--online",
|
||||
action="store_true",
|
||||
help="""Search online and attempt to identify file\nusing existing metadata and images in archive.\nMay be used in conjunction with -f and -m.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-m",
|
||||
"--metadata",
|
||||
type=parse_metadata_from_string,
|
||||
help="""Explicitly define, as a list, some tags to be used. e.g.:\n"series=Plastic Man, publisher=Quality Comics"\n"series=Kickers^, Inc., issue=1, year=1986"\nName-Value pairs are comma separated. Use a\n"^" to escape an "=" or a ",", as shown in\nthe example above. Some names that can be\nused: series, issue, issue_count, year,\npublisher, title\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
"--interactive",
|
||||
action="store_true",
|
||||
help="""Interactively query the user when there are\nmultiple matches for an online search.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-overwrite",
|
||||
"--nooverwrite",
|
||||
action="store_true",
|
||||
help="""Don't modify tag block if it already exists (relevant for -s or -c).""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--noabort",
|
||||
dest="abort_on_low_confidence",
|
||||
action="store_false",
|
||||
help="""Don't abort save operation when online match\nis of low confidence.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--nosummary",
|
||||
dest="show_save_summary",
|
||||
action="store_false",
|
||||
help="Suppress the default summary after a save operation.\n\n",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--overwrite",
|
||||
action="store_true",
|
||||
help="""Overwite all existing metadata.\nMay be used in conjunction with -o, -f and -m.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--raw", action="store_true", help="""With -p, will print out the raw tag block(s)\nfrom the file.\n"""
|
||||
)
|
||||
parser.add_argument(
|
||||
"-R",
|
||||
"--recursive",
|
||||
action="store_true",
|
||||
help="Recursively include files in sub-folders.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-S",
|
||||
"--script",
|
||||
help="""Run an "add-on" python script that uses the\nComicTagger library for custom processing.\nScript arguments can follow the script name.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--split-words",
|
||||
action="store_true",
|
||||
help="""Splits words before parsing the filename.\ne.g. 'judgedredd' to 'judge dredd'\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--terse",
|
||||
action="store_true",
|
||||
help="Don't say much (for print mode).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="Be noisy when doing what it does.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-w",
|
||||
"--wait-on-cv-rate-limit",
|
||||
action="store_true",
|
||||
help="""When encountering a Comic Vine rate limit\nerror, wait and retry query.\n\n""",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n", "--dryrun", action="store_true", help="Don't actually modify file (only relevant for -d, -s, or -r).\n\n"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--darkmode",
|
||||
action="store_true",
|
||||
help="Windows only. Force a dark pallet",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-g",
|
||||
"--glob",
|
||||
action="store_true",
|
||||
help="Windows only. Enable globbing",
|
||||
)
|
||||
parser.add_argument("files", nargs="*")
|
||||
return parser
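A minimal sketch of exercising the parser defined above (illustrative only; the archive name is a placeholder, and "cr" is assumed to be one of the short style names in MetaDataStyle.short):

parser = define_args()
opts = parser.parse_args(["-s", "-t", "cr", "-o", "-f", "My Comic #001 (2020).cbz"])
# opts.save is True, opts.type holds the MetaDataStyle index returned by metadata_type(),
# and opts.files collects the positional archive path(s).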
|
||||
|
||||
|
||||
def metadata_type(typ: str) -> int:
|
||||
if typ.casefold() not in MetaDataStyle.short:
|
||||
choices = ", ".join(MetaDataStyle.short)
|
||||
raise argparse.ArgumentTypeError(f"invalid choice: {typ} (choose from {choices.upper()})")
|
||||
return MetaDataStyle.short.index(typ.casefold())
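Used as the argparse type= callback above, the validator accepts a style name case-insensitively and returns its index; a rough illustration (the concrete entries of MetaDataStyle.short live in comicapi and are assumed here):

metadata_type("cr")    # index of "cr" in MetaDataStyle.short
metadata_type("xyz")   # raises argparse.ArgumentTypeError listing the valid choices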
|
||||
|
||||
|
||||
def parse_metadata_from_string(mdstr: str) -> GenericMetadata:
|
||||
"""The metadata string is a comma separated list of name-value pairs
|
||||
The names match the attributes of the internal metadata struct (for now)
|
||||
The caret is the special "escape character", since it's not common in
|
||||
natural language text
|
||||
|
||||
example = "series=Kickers^, Inc. ,issue=1, year=1986"
|
||||
"""
|
||||
|
||||
escaped_comma = "^,"
|
||||
escaped_equals = "^="
|
||||
replacement_token = "<_~_>"
|
||||
|
||||
md = GenericMetadata()
|
||||
|
||||
# First, replace escaped commas with a unique token (to be changed back later)
|
||||
mdstr = mdstr.replace(escaped_comma, replacement_token)
|
||||
tmp_list = mdstr.split(",")
|
||||
md_list = []
|
||||
for item in tmp_list:
|
||||
item = item.replace(replacement_token, ",")
|
||||
md_list.append(item)
|
||||
|
||||
# Now build a nice dict from the list
|
||||
md_dict = {}
|
||||
for item in md_list:
|
||||
# Make sure to fix any escaped equal signs
|
||||
i = item.replace(escaped_equals, replacement_token)
|
||||
key, value = i.split("=")
|
||||
value = value.replace(replacement_token, "=").strip()
|
||||
key = key.strip()
|
||||
if key.lower() == "credit":
|
||||
cred_attribs = value.split(":")
|
||||
role = cred_attribs[0]
|
||||
person = cred_attribs[1] if len(cred_attribs) > 1 else ""
|
||||
primary = len(cred_attribs) > 2
|
||||
md.add_credit(person.strip(), role.strip(), primary)
|
||||
else:
|
||||
md_dict[key] = value
|
||||
|
||||
# Map the dict to the metadata object
|
||||
for key, value in md_dict.items():
|
||||
if not hasattr(md, key):
|
||||
raise argparse.ArgumentTypeError(f"'{key}' is not a valid tag name")
|
||||
else:
|
||||
md.is_empty = False
|
||||
setattr(md, key, value)
|
||||
return md
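For illustration, the escape convention handled above works out like this (placeholder values; attribute names come from GenericMetadata):

md = parse_metadata_from_string("series=Kickers^, Inc., issue=1, year=1986, credit=Writer:John Ostrander")
# "^," keeps the comma inside the value, so md.series == "Kickers, Inc.",
# md.issue == "1", md.year == "1986" (values stay strings), and the credit=
# pair is routed through md.add_credit("John Ostrander", "Writer", False).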
|
||||
|
||||
|
||||
def launch_script(scriptfile: str, args: list[str]) -> None:
|
||||
# we were given a script. special case for the args:
|
||||
# 1. ignore everything before the -S,
|
||||
# 2. pass all the ones that follow (including script name) to the script
|
||||
if not os.path.exists(scriptfile):
|
||||
logger.error("Can't find %s", scriptfile)
|
||||
else:
|
||||
# I *think* this makes sense:
|
||||
# assume the base name of the file is the module name
|
||||
# add the folder of the given file to the python path import module
|
||||
dirname = os.path.dirname(scriptfile)
|
||||
module_name = os.path.splitext(os.path.basename(scriptfile))[0]
|
||||
sys.path = [dirname] + sys.path
|
||||
try:
|
||||
script = __import__(module_name)
|
||||
|
||||
# Determine if the entry point exists before trying to run it
|
||||
if "main" in dir(script):
|
||||
script.main(args)
|
||||
else:
|
||||
logger.error("Can't find entry point 'main()' in module '%s'", module_name)
|
||||
except Exception:
|
||||
logger.exception("Script: %s raised an unhandled exception: ", module_name)
|
||||
|
||||
sys.exit(0)
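Since launch_script() imports the file as a module and calls its main(), the minimal shape of an add-on script is as follows (hypothetical example file):

# my_addon.py -- launched via -S/--script; extra command-line args follow the script name
def main(args: list[str]) -> None:
    print("add-on received:", args)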
|
||||
|
||||
|
||||
def parse_cmd_line() -> argparse.Namespace:
|
||||
|
||||
if platform.system() == "Darwin" and hasattr(sys, "frozen") and sys.frozen == 1:
|
||||
# remove the PSN (process serial number) argument from OS/X
|
||||
input_args = [a for a in sys.argv[1:] if "-psn_0_" not in a]
|
||||
else:
|
||||
input_args = sys.argv[1:]
|
||||
|
||||
script_args = []
|
||||
|
||||
# first check if we're launching a script and split off script args
|
||||
for n, _ in enumerate(input_args):
|
||||
if input_args[n] == "--":
|
||||
break
|
||||
|
||||
if input_args[n] in ["-S", "--script"] and n + 1 < len(input_args):
|
||||
# insert a "--" which will cause getopt to ignore the remaining args
|
||||
# so they will be passed to the script
|
||||
script_args = input_args[n + 2 :]
|
||||
input_args = input_args[: n + 2]
|
||||
break
|
||||
|
||||
parser = define_args()
|
||||
opts = parser.parse_args(input_args)
|
||||
|
||||
if opts.config_path:
|
||||
opts.config_path = os.path.abspath(opts.config_path)
|
||||
if opts.version:
|
||||
parser.exit(
|
||||
status=1,
|
||||
message=f"ComicTagger {ctversion.version}: Copyright (c) 2012-2022 ComicTagger Team\n"
|
||||
"Distributed under Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)\n",
|
||||
)
|
||||
|
||||
opts.no_gui = any(
|
||||
[
|
||||
opts.print,
|
||||
opts.delete,
|
||||
opts.save,
|
||||
opts.copy,
|
||||
opts.rename,
|
||||
opts.export_to_zip,
|
||||
opts.only_set_cv_key,
|
||||
]
|
||||
)
|
||||
|
||||
if opts.script is not None:
|
||||
launch_script(opts.script, script_args)
|
||||
|
||||
if platform.system() == "Windows" and opts.glob:
|
||||
# no globbing on windows shell, so do it for them
|
||||
import glob
|
||||
|
||||
globs = opts.files
|
||||
opts.files = []
|
||||
for item in globs:
|
||||
opts.files.extend(glob.glob(item))
|
||||
|
||||
if opts.only_set_cv_key and opts.cv_api_key is None:
|
||||
parser.exit(message="Key not given!", status=1)
|
||||
|
||||
if not opts.only_set_cv_key and opts.no_gui and not opts.files:
|
||||
parser.exit(message="Command requires at least one filename!\n", status=1)
|
||||
|
||||
if opts.delete and opts.type is None:
|
||||
parser.exit(message="Please specify the type to delete with -t\n", status=1)
|
||||
|
||||
if opts.save and opts.type is None:
|
||||
parser.exit(message="Please specify the type to save with -t\n", status=1)
|
||||
|
||||
if opts.copy and opts.type is None:
|
||||
parser.exit(message="Please specify the type to copy to with -t\n", status=1)
|
||||
|
||||
if opts.recursive:
|
||||
opts.files = utils.get_recursive_filelist(opts.files)
|
||||
|
||||
return opts
|
||||
comictaggerlib/pagebrowser.py (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
"""A PyQT4 dialog to show pages of a comic archive"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import platform
|
||||
from typing import Optional
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
from comicapi.genericmetadata import GenericMetadata
|
||||
from comictaggerlib.coverimagewidget import CoverImageWidget
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PageBrowserWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget, metadata: GenericMetadata) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("pagebrowser.ui"), self)
|
||||
|
||||
self.pageWidget = CoverImageWidget(self.pageContainer, CoverImageWidget.ArchiveMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.pageContainer)
|
||||
gridlayout.addWidget(self.pageWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
self.pageWidget.showControls = False
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
self.comic_archive: Optional[ComicArchive] = None
|
||||
self.page_count = 0
|
||||
self.current_page_num = 0
|
||||
self.metadata = metadata
|
||||
|
||||
self.buttonBox.button(QtWidgets.QDialogButtonBox.StandardButton.Close).setDefault(True)
|
||||
if platform.system() == "Darwin":
|
||||
self.btnPrev.setText("<<")
|
||||
self.btnNext.setText(">>")
|
||||
else:
|
||||
self.btnPrev.setIcon(QtGui.QIcon(ComicTaggerSettings.get_graphic("left.png")))
|
||||
self.btnNext.setIcon(QtGui.QIcon(ComicTaggerSettings.get_graphic("right.png")))
|
||||
|
||||
self.btnNext.clicked.connect(self.next_page)
|
||||
self.btnPrev.clicked.connect(self.prev_page)
|
||||
self.show()
|
||||
|
||||
self.btnNext.setEnabled(False)
|
||||
self.btnPrev.setEnabled(False)
|
||||
|
||||
def reset(self) -> None:
|
||||
self.comic_archive = None
|
||||
self.page_count = 0
|
||||
self.current_page_num = 0
|
||||
self.metadata = GenericMetadata()
|
||||
|
||||
self.btnNext.setEnabled(False)
|
||||
self.btnPrev.setEnabled(False)
|
||||
self.pageWidget.clear()
|
||||
|
||||
def set_comic_archive(self, ca: ComicArchive) -> None:
|
||||
|
||||
self.comic_archive = ca
|
||||
self.page_count = ca.get_number_of_pages()
|
||||
self.current_page_num = 0
|
||||
self.pageWidget.set_archive(self.comic_archive)
|
||||
self.set_page()
|
||||
|
||||
if self.page_count > 1:
|
||||
self.btnNext.setEnabled(True)
|
||||
self.btnPrev.setEnabled(True)
|
||||
|
||||
def next_page(self) -> None:
|
||||
|
||||
if self.current_page_num + 1 < self.page_count:
|
||||
self.current_page_num += 1
|
||||
else:
|
||||
self.current_page_num = 0
|
||||
self.set_page()
|
||||
|
||||
def prev_page(self) -> None:
|
||||
|
||||
if self.current_page_num - 1 >= 0:
|
||||
self.current_page_num -= 1
|
||||
else:
|
||||
self.current_page_num = self.page_count - 1
|
||||
self.set_page()
|
||||
|
||||
def set_page(self) -> None:
|
||||
if not self.metadata.is_empty:
|
||||
archive_page_index = self.metadata.get_archive_page_index(self.current_page_num)
|
||||
else:
|
||||
archive_page_index = self.current_page_num
|
||||
|
||||
self.pageWidget.set_page(archive_page_index)
|
||||
self.setWindowTitle(f"Page Browser - Page {self.current_page_num + 1} (of {self.page_count}) ")
|
||||
comictaggerlib/pagelisteditor.py (new file, 398 lines)
@@ -0,0 +1,398 @@
|
||||
"""A PyQt5 widget for editing the page list info"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comicapi.comicarchive import ComicArchive, MetaDataStyle
|
||||
from comicapi.genericmetadata import ImageMetadata, PageType
|
||||
from comictaggerlib.coverimagewidget import CoverImageWidget
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def item_move_events(widget: QtWidgets.QWidget) -> QtCore.pyqtBoundSignal:
|
||||
class Filter(QtCore.QObject):
|
||||
|
||||
mysignal = QtCore.pyqtSignal(str)
|
||||
|
||||
def eventFilter(self, obj: QtCore.QObject, event: QtCore.QEvent) -> bool:
|
||||
|
||||
if obj == widget:
|
||||
if event.type() == QtCore.QEvent.Type.ChildRemoved:
|
||||
self.mysignal.emit("finish")
|
||||
if event.type() == QtCore.QEvent.Type.ChildAdded:
|
||||
self.mysignal.emit("start")
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
filt = Filter(widget)
|
||||
widget.installEventFilter(filt)
|
||||
return filt.mysignal
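The helper above installs an event filter on the widget and hands back a signal that fires around an internal drag move; a sketch of standalone use (the list widget is a placeholder):

moves = item_move_events(some_list_widget)         # some_list_widget: a QListWidget
moves.connect(lambda phase: print("drag", phase))  # "start" when the drag begins, "finish" when the row is dropped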
|
||||
|
||||
|
||||
class PageListEditor(QtWidgets.QWidget):
|
||||
firstFrontCoverChanged = QtCore.pyqtSignal(int)
|
||||
listOrderChanged = QtCore.pyqtSignal()
|
||||
modified = QtCore.pyqtSignal()
|
||||
|
||||
pageTypeNames = {
|
||||
PageType.FrontCover: "Front Cover",
|
||||
PageType.InnerCover: "Inner Cover",
|
||||
PageType.Advertisement: "Advertisement",
|
||||
PageType.Roundup: "Roundup",
|
||||
PageType.Story: "Story",
|
||||
PageType.Editorial: "Editorial",
|
||||
PageType.Letters: "Letters",
|
||||
PageType.Preview: "Preview",
|
||||
PageType.BackCover: "Back Cover",
|
||||
PageType.Other: "Other",
|
||||
PageType.Deleted: "Deleted",
|
||||
}
|
||||
|
||||
def __init__(self, parent: QtWidgets.QWidget) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("pagelisteditor.ui"), self)
|
||||
|
||||
self.pageWidget = CoverImageWidget(self.pageContainer, CoverImageWidget.ArchiveMode)
|
||||
gridlayout = QtWidgets.QGridLayout(self.pageContainer)
|
||||
gridlayout.addWidget(self.pageWidget)
|
||||
gridlayout.setContentsMargins(0, 0, 0, 0)
|
||||
self.pageWidget.showControls = False
|
||||
|
||||
self.reset_page()
|
||||
|
||||
# Add the entries to the page type combobox
|
||||
self.add_page_type_item("", "", "Alt+0", False)
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.FrontCover], PageType.FrontCover, "Alt+F")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.InnerCover], PageType.InnerCover, "Alt+I")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Advertisement], PageType.Advertisement, "Alt+A")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Roundup], PageType.Roundup, "Alt+R")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Story], PageType.Story, "Alt+S")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Editorial], PageType.Editorial, "Alt+E")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Letters], PageType.Letters, "Alt+L")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Preview], PageType.Preview, "Alt+P")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.BackCover], PageType.BackCover, "Alt+B")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Other], PageType.Other, "Alt+O")
|
||||
self.add_page_type_item(self.pageTypeNames[PageType.Deleted], PageType.Deleted, "Alt+X")
|
||||
|
||||
self.listWidget.itemSelectionChanged.connect(self.change_page)
|
||||
item_move_events(self.listWidget).connect(self.item_move_event)
|
||||
self.cbPageType.activated.connect(self.change_page_type)
|
||||
self.chkDoublePage.clicked.connect(self.toggle_double_page)
|
||||
self.leBookmark.editingFinished.connect(self.save_bookmark)
|
||||
self.btnUp.clicked.connect(self.move_current_up)
|
||||
self.btnDown.clicked.connect(self.move_current_down)
|
||||
self.pre_move_row = -1
|
||||
self.first_front_page: Optional[int] = None
|
||||
|
||||
self.comic_archive: Optional[ComicArchive] = None
|
||||
self.pages_list: list[ImageMetadata] = []
|
||||
|
||||
def reset_page(self) -> None:
|
||||
self.pageWidget.clear()
|
||||
self.cbPageType.setDisabled(True)
|
||||
self.chkDoublePage.setDisabled(True)
|
||||
self.leBookmark.setDisabled(True)
|
||||
self.comic_archive = None
|
||||
self.pages_list = []
|
||||
|
||||
def add_page_type_item(self, text: str, user_data: str, shortcut: str, show_shortcut: bool = True) -> None:
|
||||
if show_shortcut:
|
||||
text = text + " (" + shortcut + ")"
|
||||
self.cbPageType.addItem(text, user_data)
|
||||
actionItem = QtWidgets.QAction(shortcut, self)
|
||||
actionItem.triggered.connect(lambda: self.select_page_type_item(self.cbPageType.findData(user_data)))
|
||||
actionItem.setShortcut(shortcut)
|
||||
self.addAction(actionItem)
|
||||
|
||||
def select_page_type_item(self, idx: int) -> None:
|
||||
if self.cbPageType.isEnabled():
|
||||
self.cbPageType.setCurrentIndex(idx)
|
||||
self.change_page_type(idx)
|
||||
|
||||
def get_new_indexes(self, movement: int) -> list[tuple[int, int]]:
|
||||
selection = self.listWidget.selectionModel().selectedRows()
|
||||
selection.sort(reverse=movement > 0)
|
||||
newindexes: list[int] = []
|
||||
oldindexes: list[int] = []
|
||||
for x in selection:
|
||||
current = x.row()
|
||||
oldindexes.append(current)
|
||||
if 0 <= current + movement <= self.listWidget.count() - 1:
|
||||
if len(newindexes) < 1 or current + movement != newindexes[-1]:
|
||||
current += movement
|
||||
|
||||
newindexes.append(current)
|
||||
oldindexes.sort()
|
||||
newindexes.sort()
|
||||
return list(zip(newindexes, oldindexes))
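A worked trace of the index math above, assuming rows 2 and 3 are selected and movement is -1 (move up):

# old indexes -> [2, 3], new indexes -> [1, 2]
# get_new_indexes(-1) returns the (new, old) pairs [(1, 2), (2, 3)],
# which move_current_up() applies with takeItem()/insertItem().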
|
||||
|
||||
def set_selection(self, indexes: list[tuple[int, int]]) -> list[tuple[int, int]]:
|
||||
selection_ranges: list[tuple[int, int]] = []
|
||||
first = 0
|
||||
for i, sel in enumerate(indexes):
|
||||
if i == 0:
|
||||
first = sel[0]
|
||||
continue
|
||||
|
||||
if sel[0] != indexes[i - 1][0] + 1:
|
||||
selection_ranges.append((first, indexes[i - 1][0]))
|
||||
first = sel[0]
|
||||
|
||||
selection_ranges.append((first, indexes[-1][0]))
|
||||
selection = QtCore.QItemSelection()
|
||||
for x in selection_ranges:
|
||||
selection.merge(
|
||||
QtCore.QItemSelection(self.listWidget.model().index(x[0], 0), self.listWidget.model().index(x[1], 0)),
|
||||
QtCore.QItemSelectionModel.SelectionFlag.Select,
|
||||
)
|
||||
|
||||
self.listWidget.selectionModel().select(selection, QtCore.QItemSelectionModel.SelectionFlag.ClearAndSelect)
|
||||
return selection_ranges
|
||||
|
||||
def move_current_up(self) -> None:
|
||||
row = self.listWidget.currentRow()
|
||||
selection = self.get_new_indexes(-1)
|
||||
for sel in selection:
|
||||
item = self.listWidget.takeItem(sel[1])
|
||||
self.listWidget.insertItem(sel[0], item)
|
||||
|
||||
if row > 0:
|
||||
self.listWidget.setCurrentRow(row - 1)
|
||||
self.set_selection(selection)
|
||||
self.listOrderChanged.emit()
|
||||
self.emit_front_cover_change()
|
||||
self.modified.emit()
|
||||
|
||||
def move_current_down(self) -> None:
|
||||
row = self.listWidget.currentRow()
|
||||
selection = self.get_new_indexes(1)
|
||||
selection.sort(reverse=True)
|
||||
for sel in selection:
|
||||
item = self.listWidget.takeItem(sel[1])
|
||||
self.listWidget.insertItem(sel[0], item)
|
||||
|
||||
if row < self.listWidget.count() - 1:
|
||||
self.listWidget.setCurrentRow(row + 1)
|
||||
self.listOrderChanged.emit()
|
||||
self.emit_front_cover_change()
|
||||
self.set_selection(selection)
|
||||
self.modified.emit()
|
||||
|
||||
def item_move_event(self, s: str) -> None:
|
||||
if s == "start":
|
||||
self.pre_move_row = self.listWidget.currentRow()
|
||||
if s == "finish":
|
||||
if self.pre_move_row != self.listWidget.currentRow():
|
||||
self.listOrderChanged.emit()
|
||||
self.emit_front_cover_change()
|
||||
self.modified.emit()
|
||||
|
||||
def change_page_type(self, i: int) -> None:
|
||||
new_type = self.cbPageType.itemData(i)
|
||||
if self.get_current_page_type() != new_type:
|
||||
self.set_current_page_type(new_type)
|
||||
self.emit_front_cover_change()
|
||||
self.modified.emit()
|
||||
|
||||
def change_page(self) -> None:
|
||||
row = self.listWidget.currentRow()
|
||||
pagetype = self.get_current_page_type()
|
||||
|
||||
i = self.cbPageType.findData(pagetype)
|
||||
self.cbPageType.setCurrentIndex(i)
|
||||
|
||||
self.chkDoublePage.setChecked("DoublePage" in self.listWidget.item(row).data(QtCore.Qt.UserRole)[0])
|
||||
|
||||
if "Bookmark" in self.listWidget.item(row).data(QtCore.Qt.UserRole)[0]:
|
||||
self.leBookmark.setText(self.listWidget.item(row).data(QtCore.Qt.UserRole)[0]["Bookmark"])
|
||||
else:
|
||||
self.leBookmark.setText("")
|
||||
|
||||
idx = int(self.listWidget.item(row).data(QtCore.Qt.ItemDataRole.UserRole)[0]["Image"])
|
||||
|
||||
if self.comic_archive is not None:
|
||||
self.pageWidget.set_archive(self.comic_archive, idx)
|
||||
|
||||
def get_first_front_cover(self) -> int:
|
||||
front_cover = 0
|
||||
for i in range(self.listWidget.count()):
|
||||
item = self.listWidget.item(i)
|
||||
page_dict: ImageMetadata = item.data(QtCore.Qt.ItemDataRole.UserRole)[0]
|
||||
if "Type" in page_dict and page_dict["Type"] == PageType.FrontCover:
|
||||
front_cover = int(page_dict["Image"])
|
||||
break
|
||||
return front_cover
|
||||
|
||||
def get_current_page_type(self) -> str:
|
||||
row = self.listWidget.currentRow()
|
||||
page_dict: ImageMetadata = self.listWidget.item(row).data(QtCore.Qt.ItemDataRole.UserRole)[0]
|
||||
if "Type" in page_dict:
|
||||
return page_dict["Type"]
|
||||
|
||||
return ""
|
||||
|
||||
def set_current_page_type(self, t: str) -> None:
|
||||
row = self.listWidget.currentRow()
|
||||
page_dict: ImageMetadata = self.listWidget.item(row).data(QtCore.Qt.ItemDataRole.UserRole)[0]
|
||||
|
||||
if t == "":
|
||||
if "Type" in page_dict:
|
||||
del page_dict["Type"]
|
||||
else:
|
||||
page_dict["Type"] = t
|
||||
|
||||
item = self.listWidget.item(row)
|
||||
# wrap the dict in a tuple to keep it from being converted to QStrings
|
||||
item.setData(QtCore.Qt.ItemDataRole.UserRole, (page_dict,))
|
||||
item.setText(self.list_entry_text(page_dict))
|
||||
|
||||
def toggle_double_page(self) -> None:
|
||||
row = self.listWidget.currentRow()
|
||||
page_dict: ImageMetadata = self.listWidget.item(row).data(QtCore.Qt.UserRole)[0]
|
||||
|
||||
cbx = self.sender()
|
||||
|
||||
if isinstance(cbx, QtWidgets.QCheckBox) and cbx.isChecked():
|
||||
if "DoublePage" not in page_dict:
|
||||
page_dict["DoublePage"] = True
|
||||
self.modified.emit()
|
||||
elif "DoublePage" in page_dict:
|
||||
del page_dict["DoublePage"]
|
||||
self.modified.emit()
|
||||
|
||||
item = self.listWidget.item(row)
|
||||
# wrap the dict in a tuple to keep from being converted to QStrings
|
||||
item.setData(QtCore.Qt.UserRole, (page_dict,))
|
||||
item.setText(self.list_entry_text(page_dict))
|
||||
|
||||
self.listWidget.setFocus()
|
||||
|
||||
def save_bookmark(self) -> None:
|
||||
row = self.listWidget.currentRow()
|
||||
page_dict: ImageMetadata = self.listWidget.item(row).data(QtCore.Qt.UserRole)[0]
|
||||
|
||||
current_bookmark = ""
|
||||
if "Bookmark" in page_dict:
|
||||
current_bookmark = page_dict["Bookmark"]
|
||||
|
||||
if self.leBookmark.text().strip():
|
||||
new_bookmark = str(self.leBookmark.text().strip())
|
||||
if current_bookmark != new_bookmark:
|
||||
page_dict["Bookmark"] = new_bookmark
|
||||
self.modified.emit()
|
||||
elif current_bookmark != "":
|
||||
del page_dict["Bookmark"]
|
||||
self.modified.emit()
|
||||
|
||||
item = self.listWidget.item(row)
|
||||
# wrap the dict in a tuple to keep from being converted to QStrings
|
||||
item.setData(QtCore.Qt.UserRole, (page_dict,))
|
||||
item.setText(self.list_entry_text(page_dict))
|
||||
|
||||
self.listWidget.setFocus()
|
||||
|
||||
def set_data(self, comic_archive: ComicArchive, pages_list: list[ImageMetadata]) -> None:
|
||||
self.comic_archive = comic_archive
|
||||
self.pages_list = pages_list
|
||||
if pages_list is not None and len(pages_list) > 0:
|
||||
self.cbPageType.setDisabled(False)
|
||||
self.chkDoublePage.setDisabled(False)
|
||||
self.leBookmark.setDisabled(False)
|
||||
|
||||
self.listWidget.itemSelectionChanged.disconnect(self.change_page)
|
||||
|
||||
self.listWidget.clear()
|
||||
for p in pages_list:
|
||||
item = QtWidgets.QListWidgetItem(self.list_entry_text(p))
|
||||
# wrap the dict in a tuple to keep it from being converted to QStrings
|
||||
item.setData(QtCore.Qt.ItemDataRole.UserRole, (p,))
|
||||
|
||||
self.listWidget.addItem(item)
|
||||
self.first_front_page = self.get_first_front_cover()
|
||||
self.listWidget.itemSelectionChanged.connect(self.change_page)
|
||||
self.listWidget.setCurrentRow(0)
|
||||
|
||||
def list_entry_text(self, page_dict: ImageMetadata) -> str:
|
||||
text = str(int(page_dict["Image"]) + 1)
|
||||
if "Type" in page_dict:
|
||||
if page_dict["Type"] in self.pageTypeNames:
|
||||
text += " (" + self.pageTypeNames[page_dict["Type"]] + ")"
|
||||
else:
|
||||
text += " (Error: " + page_dict["Type"] + ")"
|
||||
if "DoublePage" in page_dict:
|
||||
text += " " + "\U00002461"
|
||||
if "Bookmark" in page_dict:
|
||||
text += " " + "\U0001F516"
|
||||
return text
|
||||
|
||||
def get_page_list(self) -> list[ImageMetadata]:
|
||||
page_list = []
|
||||
for i in range(self.listWidget.count()):
|
||||
item = self.listWidget.item(i)
|
||||
page_list.append(item.data(QtCore.Qt.ItemDataRole.UserRole)[0])
|
||||
return page_list
|
||||
|
||||
def emit_front_cover_change(self) -> None:
|
||||
if self.first_front_page != self.get_first_front_cover():
|
||||
self.first_front_page = self.get_first_front_cover()
|
||||
self.firstFrontCoverChanged.emit(self.first_front_page)
|
||||
|
||||
def set_metadata_style(self, data_style: int) -> None:
|
||||
# depending on the current data style, certain fields are disabled
|
||||
|
||||
inactive_color = QtGui.QColor(255, 170, 150)
|
||||
active_palette = self.cbPageType.palette()
|
||||
|
||||
inactive_palette3 = self.cbPageType.palette()
|
||||
inactive_palette3.setColor(QtGui.QPalette.ColorRole.Base, inactive_color)
|
||||
|
||||
if data_style == MetaDataStyle.CIX:
|
||||
self.btnUp.setEnabled(True)
|
||||
self.btnDown.setEnabled(True)
|
||||
self.cbPageType.setEnabled(True)
|
||||
self.chkDoublePage.setEnabled(True)
|
||||
self.leBookmark.setEnabled(True)
|
||||
self.listWidget.setEnabled(True)
|
||||
|
||||
self.leBookmark.setPalette(active_palette)
|
||||
self.listWidget.setPalette(active_palette)
|
||||
|
||||
elif data_style == MetaDataStyle.CBI:
|
||||
self.btnUp.setEnabled(False)
|
||||
self.btnDown.setEnabled(False)
|
||||
self.cbPageType.setEnabled(False)
|
||||
self.chkDoublePage.setEnabled(False)
|
||||
self.leBookmark.setEnabled(False)
|
||||
self.listWidget.setEnabled(False)
|
||||
|
||||
self.leBookmark.setPalette(inactive_palette3)
|
||||
self.listWidget.setPalette(inactive_palette3)
|
||||
|
||||
elif data_style == MetaDataStyle.COMET:
|
||||
pass
|
||||
|
||||
# make sure combo is disabled when no list
|
||||
if self.comic_archive is None:
|
||||
self.cbPageType.setEnabled(False)
|
||||
self.chkDoublePage.setEnabled(False)
|
||||
self.leBookmark.setEnabled(False)
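A sketch of how the editor above is typically wired into a host window (ca, md and on_dirty are placeholders; md.pages stands for the GenericMetadata page list):

editor = PageListEditor(parent_widget)
editor.set_data(ca, md.pages)      # populate the list from the archive's page metadata
editor.modified.connect(on_dirty)  # host callback fired on any page edit
pages = editor.get_page_list()     # read the edited ImageMetadata entries back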
|
||||
comictaggerlib/pageloader.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
"""A PyQT4 class to load a page image from a ComicArchive in a background thread"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from PyQt5 import QtCore
|
||||
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PageLoader(QtCore.QThread):
|
||||
"""
|
||||
This class holds onto a reference of each instance in a list since
|
||||
problems occur if the ref count goes to zero and the GC tries to reap
|
||||
the object while the thread is still running.
|
||||
If the client class wants to stop the thread, they should mark it as
|
||||
"abandoned", and no signals will be issued.
|
||||
"""
|
||||
|
||||
loadComplete = QtCore.pyqtSignal(bytes)
|
||||
|
||||
instanceList: list[QtCore.QThread] = []
|
||||
mutex = QtCore.QMutex()
|
||||
|
||||
# Remove all finished threads from the list
|
||||
@staticmethod
|
||||
def reap_instances() -> None:
|
||||
for obj in reversed(PageLoader.instanceList):
|
||||
if obj.isFinished():
|
||||
PageLoader.instanceList.remove(obj)
|
||||
|
||||
def __init__(self, ca: ComicArchive, page_num: int) -> None:
|
||||
QtCore.QThread.__init__(self)
|
||||
self.ca: ComicArchive = ca
|
||||
self.page_num: int = page_num
|
||||
self.abandoned = False
|
||||
|
||||
# remove any old instances, and then add ourself
|
||||
PageLoader.mutex.lock()
|
||||
PageLoader.reap_instances()
|
||||
PageLoader.instanceList.append(self)
|
||||
PageLoader.mutex.unlock()
|
||||
|
||||
def run(self) -> None:
|
||||
image_data = self.ca.get_page(self.page_num)
|
||||
if self.abandoned:
|
||||
return
|
||||
|
||||
if image_data:
|
||||
if self.abandoned:
|
||||
return
|
||||
self.loadComplete.emit(image_data)
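A minimal sketch of driving the loader above from a widget (ca and show_image_bytes are placeholders):

loader = PageLoader(ca, page_num=3)            # ca: a ComicArchive opened elsewhere
loader.loadComplete.connect(show_image_bytes)  # slot receiving the raw page bytes
loader.start()
# to cancel without receiving the signal, set loader.abandoned = True before it finishes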
|
||||
comictaggerlib/progresswindow.py (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
"""A PyQt5 dialog to show ID log and progress"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.ui.qtutils import reduce_widget_font_size
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IDProgressWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("progresswindow.ui"), self)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
reduce_widget_font_size(self.textEdit)
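Usage is just construct-and-show; textEdit comes from progresswindow.ui and is assumed to be a QTextEdit (a sketch):

progress = IDProgressWindow(parent_widget)            # parent_widget is a placeholder
progress.show()
progress.textEdit.append("Searching for matches...")  # log lines land in the dialog's text box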
|
||||
comictaggerlib/renamewindow.py (new file, 208 lines)
@@ -0,0 +1,208 @@
|
||||
"""A PyQT4 dialog to confirm rename"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import List, TypedDict
|
||||
|
||||
from PyQt5 import QtCore, QtWidgets, uic
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.comicarchive import ComicArchive, MetaDataStyle
|
||||
from comictaggerlib.filerenamer import FileRenamer
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
from comictaggerlib.settingswindow import SettingsWindow
|
||||
from comictaggerlib.ui.qtutils import center_window_on_parent
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RenameItem(TypedDict):
|
||||
archive: ComicArchive
|
||||
new_name: str
|
||||
|
||||
|
||||
class RenameWindow(QtWidgets.QDialog):
|
||||
def __init__(
|
||||
self,
|
||||
parent: QtWidgets.QWidget,
|
||||
comic_archive_list: List[ComicArchive],
|
||||
data_style: int,
|
||||
settings: ComicTaggerSettings,
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("renamewindow.ui"), self)
|
||||
self.label.setText(f"Preview (based on {MetaDataStyle.name[data_style]} tags):")
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(
|
||||
self.windowFlags()
|
||||
| QtCore.Qt.WindowType.WindowSystemMenuHint
|
||||
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
|
||||
)
|
||||
)
|
||||
|
||||
self.settings = settings
|
||||
self.comic_archive_list = comic_archive_list
|
||||
self.data_style = data_style
|
||||
self.rename_list: list[RenameItem] = []
|
||||
|
||||
self.btnSettings.clicked.connect(self.modify_settings)
|
||||
self.renamer = FileRenamer(None, platform="universal" if self.settings.rename_strict else "auto")
|
||||
|
||||
self.config_renamer()
|
||||
self.do_preview()
|
||||
|
||||
def config_renamer(self) -> None:
|
||||
self.renamer.set_template(self.settings.rename_template)
|
||||
self.renamer.set_issue_zero_padding(self.settings.rename_issue_number_padding)
|
||||
self.renamer.set_smart_cleanup(self.settings.rename_use_smart_string_cleanup)
|
||||
|
||||
def do_preview(self) -> None:
|
||||
while self.twList.rowCount() > 0:
|
||||
self.twList.removeRow(0)
|
||||
|
||||
self.twList.setSortingEnabled(False)
|
||||
|
||||
for ca in self.comic_archive_list:
|
||||
|
||||
new_ext = ca.path.suffix # default
|
||||
if self.settings.rename_extension_based_on_archive:
|
||||
if ca.is_sevenzip():
|
||||
new_ext = ".cb7"
|
||||
elif ca.is_zip():
|
||||
new_ext = ".cbz"
|
||||
elif ca.is_rar():
|
||||
new_ext = ".cbr"
|
||||
|
||||
md = ca.read_metadata(self.data_style)
|
||||
if md.is_empty:
|
||||
md = ca.metadata_from_filename(
|
||||
self.settings.complicated_parser,
|
||||
self.settings.remove_c2c,
|
||||
self.settings.remove_fcbd,
|
||||
self.settings.remove_publisher,
|
||||
)
|
||||
self.renamer.set_metadata(md)
|
||||
self.renamer.move = self.settings.rename_move_dir
|
||||
|
||||
try:
|
||||
new_name = self.renamer.determine_name(new_ext)
|
||||
except Exception as e:
|
||||
QtWidgets.QMessageBox.critical(
|
||||
self,
|
||||
"Invalid format string!",
|
||||
"Your rename template is invalid!"
|
||||
f"<br/><br/>{e}<br/><br/>"
|
||||
"Please consult the template help in the "
|
||||
"settings and the documentation on the format at "
|
||||
"<a href='https://docs.python.org/3/library/string.html#format-string-syntax'>"
|
||||
"https://docs.python.org/3/library/string.html#format-string-syntax</a>",
|
||||
)
|
||||
return
|
||||
|
||||
row = self.twList.rowCount()
|
||||
self.twList.insertRow(row)
|
||||
folder_item = QtWidgets.QTableWidgetItem()
|
||||
old_name_item = QtWidgets.QTableWidgetItem()
|
||||
new_name_item = QtWidgets.QTableWidgetItem()
|
||||
|
||||
item_text = os.path.split(ca.path)[0]
|
||||
folder_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 0, folder_item)
|
||||
folder_item.setText(item_text)
|
||||
folder_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
|
||||
item_text = os.path.split(ca.path)[1]
|
||||
old_name_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 1, old_name_item)
|
||||
old_name_item.setText(item_text)
|
||||
old_name_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
|
||||
|
||||
new_name_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
|
||||
self.twList.setItem(row, 2, new_name_item)
|
||||
new_name_item.setText(new_name)
|
||||
new_name_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, new_name)
|
||||
|
||||
dict_item = RenameItem(
|
||||
{
|
||||
"archive": ca,
|
||||
"new_name": new_name,
|
||||
}
|
||||
)
|
||||
self.rename_list.append(dict_item)
|
||||
|
||||
# Adjust column sizes
|
||||
self.twList.setVisible(False)
|
||||
self.twList.resizeColumnsToContents()
|
||||
self.twList.setVisible(True)
|
||||
if self.twList.columnWidth(0) > 200:
|
||||
self.twList.setColumnWidth(0, 200)
|
||||
|
||||
self.twList.setSortingEnabled(True)
|
||||
|
||||
def modify_settings(self) -> None:
|
||||
settingswin = SettingsWindow(self, self.settings)
|
||||
settingswin.setModal(True)
|
||||
settingswin.show_rename_tab()
|
||||
settingswin.exec()
|
||||
if settingswin.result():
|
||||
self.config_renamer()
|
||||
self.do_preview()
|
||||
|
||||
def accept(self) -> None:
|
||||
|
||||
prog_dialog = QtWidgets.QProgressDialog("", "Cancel", 0, len(self.rename_list), self)
|
||||
prog_dialog.setWindowTitle("Renaming Archives")
|
||||
prog_dialog.setWindowModality(QtCore.Qt.WindowModality.WindowModal)
|
||||
prog_dialog.setMinimumDuration(100)
|
||||
center_window_on_parent(prog_dialog)
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
|
||||
for idx, item in enumerate(self.rename_list):
|
||||
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
if prog_dialog.wasCanceled():
|
||||
break
|
||||
idx += 1
|
||||
prog_dialog.setValue(idx)
|
||||
prog_dialog.setLabelText(item["new_name"])
|
||||
center_window_on_parent(prog_dialog)
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
|
||||
folder = os.path.dirname(os.path.abspath(item["archive"].path))
|
||||
if self.settings.rename_move_dir and len(self.settings.rename_dir.strip()) > 3:
|
||||
folder = self.settings.rename_dir.strip()
|
||||
|
||||
new_abs_path = utils.unique_file(os.path.join(folder, item["new_name"]))
|
||||
|
||||
if os.path.join(folder, item["new_name"]) == item["archive"].path:
|
||||
logger.info(item["new_name"], "Filename is already good!")
|
||||
continue
|
||||
|
||||
if not item["archive"].is_writable(check_rar_status=False):
|
||||
continue
|
||||
|
||||
os.makedirs(os.path.dirname(new_abs_path), 0o777, True)
|
||||
os.rename(item["archive"].path, new_abs_path)
|
||||
|
||||
item["archive"].rename(new_abs_path)
|
||||
|
||||
prog_dialog.hide()
|
||||
QtCore.QCoreApplication.processEvents()
|
||||
|
||||
QtWidgets.QDialog.accept(self)
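A sketch of launching the dialog above from the caller (the archive list, style and settings objects are placeholders):

dlg = RenameWindow(parent_widget, selected_archives, data_style, settings)
if dlg.exec():           # accept() performs the renames as the dialog closes
    refresh_file_list()  # hypothetical follow-up in the caller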
|
||||
comictaggerlib/resulttypes.py (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import List, Optional, Union
|
||||
|
||||
from typing_extensions import NotRequired, Required, TypedDict
|
||||
|
||||
from comicapi.comicarchive import ComicArchive
|
||||
|
||||
|
||||
class IssueResult(TypedDict):
|
||||
series: str
|
||||
distance: int
|
||||
issue_number: str
|
||||
cv_issue_count: int
|
||||
url_image_hash: int
|
||||
issue_title: str
|
||||
issue_id: int # int?
|
||||
volume_id: int # int?
|
||||
month: Optional[int]
|
||||
year: Optional[int]
|
||||
publisher: Optional[str]
|
||||
image_url: str
|
||||
thumb_url: str
|
||||
page_url: str
|
||||
description: str
|
||||
|
||||
|
||||
class OnlineMatchResults:
|
||||
def __init__(self) -> None:
|
||||
self.good_matches: List[str] = []
|
||||
self.no_matches: List[str] = []
|
||||
self.multiple_matches: List[MultipleMatch] = []
|
||||
self.low_confidence_matches: List[MultipleMatch] = []
|
||||
self.write_failures: List[str] = []
|
||||
self.fetch_data_failures: List[str] = []
|
||||
|
||||
|
||||
class MultipleMatch:
|
||||
def __init__(self, ca: ComicArchive, match_list: List[IssueResult]) -> None:
|
||||
self.ca: ComicArchive = ca
|
||||
self.matches: list[IssueResult] = match_list
|
||||
|
||||
|
||||
class SelectDetails(TypedDict):
|
||||
image_url: Optional[str]
|
||||
thumb_image_url: Optional[str]
|
||||
cover_date: Optional[str]
|
||||
site_detail_url: Optional[str]
|
||||
|
||||
|
||||
class CVResult(TypedDict):
|
||||
error: str
|
||||
limit: int
|
||||
offset: int
|
||||
number_of_page_results: int
|
||||
number_of_total_results: int
|
||||
status_code: int
|
||||
results: Union[
|
||||
CVIssuesResults,
|
||||
CVIssueDetailResults,
|
||||
CVVolumeResults,
|
||||
list[CVIssuesResults],
|
||||
list[CVVolumeResults],
|
||||
list[CVIssueDetailResults],
|
||||
]
|
||||
version: str
|
||||
|
||||
|
||||
class CVImage(TypedDict, total=False):
|
||||
icon_url: str
|
||||
medium_url: str
|
||||
screen_url: str
|
||||
screen_large_url: str
|
||||
small_url: str
|
||||
super_url: Required[str]
|
||||
thumb_url: str
|
||||
tiny_url: str
|
||||
original_url: str
|
||||
image_tags: str
|
||||
|
||||
|
||||
class CVVolume(TypedDict):
|
||||
api_detail_url: str
|
||||
id: int
|
||||
name: str
|
||||
site_detail_url: str
|
||||
|
||||
|
||||
class CVIssuesResults(TypedDict):
|
||||
cover_date: str
|
||||
description: str
|
||||
id: int
|
||||
image: CVImage
|
||||
issue_number: str
|
||||
name: str
|
||||
site_detail_url: str
|
||||
volume: NotRequired[CVVolume]
|
||||
|
||||
|
||||
class CVPublisher(TypedDict, total=False):
|
||||
api_detail_url: str
|
||||
id: int
|
||||
name: Required[str]
|
||||
|
||||
|
||||
class CVVolumeResults(TypedDict):
|
||||
count_of_issues: int
|
||||
description: NotRequired[str]
|
||||
id: int
|
||||
image: NotRequired[CVImage]
|
||||
name: str
|
||||
publisher: CVPublisher
|
||||
start_year: str
|
||||
resource_type: NotRequired[str]
|
||||
|
||||
|
||||
class CVCredits(TypedDict):
|
||||
api_detail_url: str
|
||||
id: int
|
||||
name: str
|
||||
site_detail_url: str
|
||||
|
||||
|
||||
class CVPersonCredits(TypedDict):
|
||||
api_detail_url: str
|
||||
id: int
|
||||
name: str
|
||||
site_detail_url: str
|
||||
role: str
|
||||
|
||||
|
||||
class CVIssueDetailResults(TypedDict):
|
||||
aliases: None
|
||||
api_detail_url: str
|
||||
character_credits: list[CVCredits]
|
||||
character_died_in: None
|
||||
concept_credits: list[CVCredits]
|
||||
cover_date: str
|
||||
date_added: str
|
||||
date_last_updated: str
|
||||
deck: None
|
||||
description: str
|
||||
first_appearance_characters: None
|
||||
first_appearance_concepts: None
|
||||
first_appearance_locations: None
|
||||
first_appearance_objects: None
|
||||
first_appearance_storyarcs: None
|
||||
first_appearance_teams: None
|
||||
has_staff_review: bool
|
||||
id: int
|
||||
image: CVImage
|
||||
issue_number: str
|
||||
location_credits: list[CVCredits]
|
||||
name: str
|
||||
object_credits: list[CVCredits]
|
||||
person_credits: list[CVPersonCredits]
|
||||
site_detail_url: str
|
||||
store_date: str
|
||||
story_arc_credits: list[CVCredits]
|
||||
team_credits: list[CVCredits]
|
||||
team_disbanded_in: None
|
||||
volume: CVVolume
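These TypedDicts are plain dict shapes; the matcher-side containers above combine like this (a sketch with placeholder values):

results = OnlineMatchResults()
results.no_matches.append("Some Comic #1.cbz")
results.multiple_matches.append(MultipleMatch(ca, candidate_issues))  # candidate_issues: list[IssueResult]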
|
||||
comictaggerlib/settings.py (new file, 415 lines)
@@ -0,0 +1,415 @@
|
||||
"""Settings class for ComicTagger app"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import configparser
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import sys
|
||||
import uuid
|
||||
from typing import Iterator, TextIO, Union, no_type_check
|
||||
|
||||
from comicapi import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ComicTaggerSettings:
|
||||
folder: Union[pathlib.Path, str] = ""
|
||||
|
||||
@staticmethod
|
||||
def get_settings_folder() -> pathlib.Path:
|
||||
if not ComicTaggerSettings.folder:
|
||||
if platform.system() == "Windows":
|
||||
ComicTaggerSettings.folder = pathlib.Path(os.environ["APPDATA"]) / "ComicTagger"
|
||||
else:
|
||||
ComicTaggerSettings.folder = pathlib.Path(os.path.expanduser("~")) / ".ComicTagger"
|
||||
return pathlib.Path(ComicTaggerSettings.folder)
|
||||
|
||||
@staticmethod
|
||||
def base_dir() -> pathlib.Path:
|
||||
if getattr(sys, "frozen", None):
|
||||
return pathlib.Path(sys._MEIPASS)
|
||||
|
||||
return pathlib.Path(__file__).parent
|
||||
|
||||
@staticmethod
|
||||
def get_graphic(filename: Union[str, pathlib.Path]) -> str:
|
||||
return str(ComicTaggerSettings.base_dir() / "graphics" / filename)
|
||||
|
||||
@staticmethod
|
||||
def get_ui_file(filename: Union[str, pathlib.Path]) -> pathlib.Path:
|
||||
return ComicTaggerSettings.base_dir() / "ui" / filename
|
||||
|
||||
def __init__(self, folder: Union[str, pathlib.Path, None]) -> None:
|
||||
# General Settings
|
||||
self.rar_exe_path = ""
|
||||
self.allow_cbi_in_rar = True
|
||||
self.check_for_new_version = False
|
||||
self.send_usage_stats = False
|
||||
|
||||
# automatic settings
|
||||
self.install_id = uuid.uuid4().hex
|
||||
self.last_selected_save_data_style = 0
|
||||
self.last_selected_load_data_style = 0
|
||||
self.last_opened_folder = ""
|
||||
self.last_main_window_width = 0
|
||||
self.last_main_window_height = 0
|
||||
self.last_main_window_x = 0
|
||||
self.last_main_window_y = 0
|
||||
self.last_form_side_width = -1
|
||||
self.last_list_side_width = -1
|
||||
self.last_filelist_sorted_column = -1
|
||||
self.last_filelist_sorted_order = 0
|
||||
|
||||
# identifier settings
|
||||
self.id_length_delta_thresh = 5
|
||||
self.id_publisher_filter = "Panini Comics, Abril, Planeta DeAgostini, Editorial Televisa, Dino Comics"
|
||||
|
||||
# Show/ask dialog flags
|
||||
self.ask_about_cbi_in_rar = True
|
||||
self.show_disclaimer = True
|
||||
self.dont_notify_about_this_version = ""
|
||||
self.ask_about_usage_stats = True
|
||||
|
||||
# filename parsing settings
|
||||
self.complicated_parser = False
|
||||
self.remove_c2c = False
|
||||
self.remove_fcbd = False
|
||||
self.remove_publisher = False
|
||||
|
||||
# Comic Vine settings
|
||||
self.use_series_start_as_volume = False
|
||||
self.clear_form_before_populating_from_cv = False
|
||||
self.remove_html_tables = False
|
||||
self.cv_api_key = ""
|
||||
self.auto_imprint = False
|
||||
|
||||
self.sort_series_by_year = True
|
||||
self.exact_series_matches_first = True
|
||||
self.always_use_publisher_filter = False
|
||||
|
||||
# CBL Transform settings
|
||||
|
||||
self.assume_lone_credit_is_primary = False
|
||||
self.copy_characters_to_tags = False
|
||||
self.copy_teams_to_tags = False
|
||||
self.copy_locations_to_tags = False
|
||||
self.copy_storyarcs_to_tags = False
|
||||
self.copy_notes_to_comments = False
|
||||
self.copy_weblink_to_comments = False
|
||||
self.apply_cbl_transform_on_cv_import = False
|
||||
self.apply_cbl_transform_on_bulk_operation = False
|
||||
|
||||
# Rename settings
|
||||
self.rename_template = "%series% #%issue% (%year%)"
|
||||
self.rename_issue_number_padding = 3
|
||||
self.rename_use_smart_string_cleanup = True
|
||||
self.rename_extension_based_on_archive = True
|
||||
self.rename_dir = ""
|
||||
self.rename_move_dir = False
|
||||
self.rename_strict = False
|
||||
|
||||
# Auto-tag stickies
|
||||
self.save_on_low_confidence = False
|
||||
self.dont_use_year_when_identifying = False
|
||||
self.assume_1_if_no_issue_num = False
|
||||
self.ignore_leading_numbers_in_filename = False
|
||||
self.remove_archive_after_successful_match = False
|
||||
self.wait_and_retry_on_rate_limit = False
|
||||
|
||||
self.config = configparser.RawConfigParser()
|
||||
if folder:
|
||||
ComicTaggerSettings.folder = pathlib.Path(folder)
|
||||
else:
|
||||
ComicTaggerSettings.folder = ComicTaggerSettings.get_settings_folder()
|
||||
|
||||
if not os.path.exists(ComicTaggerSettings.folder):
|
||||
os.makedirs(ComicTaggerSettings.folder)
|
||||
|
||||
self.settings_file = os.path.join(ComicTaggerSettings.folder, "settings")
|
||||
|
||||
# if config file doesn't exist, write one out
|
||||
if not os.path.exists(self.settings_file):
|
||||
self.save()
|
||||
else:
|
||||
self.load()
|
||||
|
||||
# take a crack at finding rar exe, if not set already
|
||||
if self.rar_exe_path == "":
|
||||
if platform.system() == "Windows":
|
||||
# look in some likely places for Windows machines
|
||||
if os.path.exists(r"C:\Program Files\WinRAR\Rar.exe"):
|
||||
self.rar_exe_path = r"C:\Program Files\WinRAR\Rar.exe"
|
||||
elif os.path.exists(r"C:\Program Files (x86)\WinRAR\Rar.exe"):
|
||||
self.rar_exe_path = r"C:\Program Files (x86)\WinRAR\Rar.exe"
|
||||
else:
|
||||
# see if it's in the path of unix user
|
||||
rarpath = utils.which("rar")
|
||||
if rarpath is not None:
|
||||
self.rar_exe_path = rarpath
|
||||
if self.rar_exe_path != "":
|
||||
self.save()
|
||||
if self.rar_exe_path != "":
|
||||
# make sure rar program is now in the path for the rar class
|
||||
utils.add_to_path(os.path.dirname(self.rar_exe_path))
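A sketch of typical use of the class so far (the settings location follows get_settings_folder() above; save() is defined further down in this module):

settings = ComicTaggerSettings(None)  # None -> ~/.ComicTagger or %APPDATA%\ComicTagger
print(settings.rename_template)       # "%series% #%issue% (%year%)" by default
settings.check_for_new_version = True
settings.save()                       # persist back to the "settings" file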
|
||||
|
||||
def load(self) -> None:
|
||||
def readline_generator(f: TextIO) -> Iterator[str]:
|
||||
line = f.readline()
|
||||
while line:
|
||||
yield line
|
||||
line = f.readline()
|
||||
|
||||
with open(self.settings_file, "r", encoding="utf-8") as f:
|
||||
self.config.read_file(readline_generator(f))
|
||||
|
||||
self.rar_exe_path = self.config.get("settings", "rar_exe_path")
|
||||
if self.config.has_option("settings", "check_for_new_version"):
|
||||
self.check_for_new_version = self.config.getboolean("settings", "check_for_new_version")
|
||||
if self.config.has_option("settings", "send_usage_stats"):
|
||||
self.send_usage_stats = self.config.getboolean("settings", "send_usage_stats")
|
||||
|
||||
if self.config.has_option("auto", "install_id"):
|
||||
self.install_id = self.config.get("auto", "install_id")
|
||||
if self.config.has_option("auto", "last_selected_load_data_style"):
|
||||
self.last_selected_load_data_style = self.config.getint("auto", "last_selected_load_data_style")
|
||||
if self.config.has_option("auto", "last_selected_save_data_style"):
|
||||
self.last_selected_save_data_style = self.config.getint("auto", "last_selected_save_data_style")
|
||||
if self.config.has_option("auto", "last_opened_folder"):
|
||||
self.last_opened_folder = self.config.get("auto", "last_opened_folder")
|
||||
if self.config.has_option("auto", "last_main_window_width"):
|
||||
self.last_main_window_width = self.config.getint("auto", "last_main_window_width")
|
||||
if self.config.has_option("auto", "last_main_window_height"):
|
||||
self.last_main_window_height = self.config.getint("auto", "last_main_window_height")
|
||||
if self.config.has_option("auto", "last_main_window_x"):
|
||||
self.last_main_window_x = self.config.getint("auto", "last_main_window_x")
|
||||
if self.config.has_option("auto", "last_main_window_y"):
|
||||
self.last_main_window_y = self.config.getint("auto", "last_main_window_y")
|
||||
if self.config.has_option("auto", "last_form_side_width"):
|
||||
self.last_form_side_width = self.config.getint("auto", "last_form_side_width")
|
||||
if self.config.has_option("auto", "last_list_side_width"):
|
||||
self.last_list_side_width = self.config.getint("auto", "last_list_side_width")
|
||||
if self.config.has_option("auto", "last_filelist_sorted_column"):
|
||||
self.last_filelist_sorted_column = self.config.getint("auto", "last_filelist_sorted_column")
|
||||
if self.config.has_option("auto", "last_filelist_sorted_order"):
|
||||
self.last_filelist_sorted_order = self.config.getint("auto", "last_filelist_sorted_order")
|
||||
|
||||
if self.config.has_option("identifier", "id_length_delta_thresh"):
|
||||
self.id_length_delta_thresh = self.config.getint("identifier", "id_length_delta_thresh")
|
||||
if self.config.has_option("identifier", "id_publisher_filter"):
|
||||
self.id_publisher_filter = self.config.get("identifier", "id_publisher_filter")
|
||||
|
||||
if self.config.has_option("filenameparser", "complicated_parser"):
|
||||
self.complicated_parser = self.config.getboolean("filenameparser", "complicated_parser")
|
||||
if self.config.has_option("filenameparser", "remove_c2c"):
|
||||
self.remove_c2c = self.config.getboolean("filenameparser", "remove_c2c")
|
||||
if self.config.has_option("filenameparser", "remove_fcbd"):
|
||||
self.remove_fcbd = self.config.getboolean("filenameparser", "remove_fcbd")
|
||||
if self.config.has_option("filenameparser", "remove_publisher"):
|
||||
self.remove_publisher = self.config.getboolean("filenameparser", "remove_publisher")
|
||||
|
||||
if self.config.has_option("dialogflags", "ask_about_cbi_in_rar"):
|
||||
self.ask_about_cbi_in_rar = self.config.getboolean("dialogflags", "ask_about_cbi_in_rar")
|
||||
if self.config.has_option("dialogflags", "show_disclaimer"):
|
||||
self.show_disclaimer = self.config.getboolean("dialogflags", "show_disclaimer")
|
||||
if self.config.has_option("dialogflags", "dont_notify_about_this_version"):
|
||||
self.dont_notify_about_this_version = self.config.get("dialogflags", "dont_notify_about_this_version")
|
||||
if self.config.has_option("dialogflags", "ask_about_usage_stats"):
|
||||
self.ask_about_usage_stats = self.config.getboolean("dialogflags", "ask_about_usage_stats")
|
||||
|
||||
if self.config.has_option("comicvine", "use_series_start_as_volume"):
|
||||
self.use_series_start_as_volume = self.config.getboolean("comicvine", "use_series_start_as_volume")
|
||||
if self.config.has_option("comicvine", "clear_form_before_populating_from_cv"):
|
||||
self.clear_form_before_populating_from_cv = self.config.getboolean(
|
||||
"comicvine", "clear_form_before_populating_from_cv"
|
||||
)
|
||||
if self.config.has_option("comicvine", "remove_html_tables"):
|
||||
self.remove_html_tables = self.config.getboolean("comicvine", "remove_html_tables")
|
||||
|
||||
if self.config.has_option("comicvine", "sort_series_by_year"):
|
||||
self.sort_series_by_year = self.config.getboolean("comicvine", "sort_series_by_year")
|
||||
if self.config.has_option("comicvine", "exact_series_matches_first"):
|
||||
self.exact_series_matches_first = self.config.getboolean("comicvine", "exact_series_matches_first")
|
||||
if self.config.has_option("comicvine", "always_use_publisher_filter"):
|
||||
self.always_use_publisher_filter = self.config.getboolean("comicvine", "always_use_publisher_filter")
|
||||
|
||||
if self.config.has_option("comicvine", "cv_api_key"):
|
||||
self.cv_api_key = self.config.get("comicvine", "cv_api_key")
|
||||
|
||||
if self.config.has_option("cbl_transform", "assume_lone_credit_is_primary"):
|
||||
self.assume_lone_credit_is_primary = self.config.getboolean(
|
||||
"cbl_transform", "assume_lone_credit_is_primary"
|
||||
)
|
||||
if self.config.has_option("cbl_transform", "copy_characters_to_tags"):
|
||||
self.copy_characters_to_tags = self.config.getboolean("cbl_transform", "copy_characters_to_tags")
|
||||
if self.config.has_option("cbl_transform", "copy_teams_to_tags"):
|
||||
self.copy_teams_to_tags = self.config.getboolean("cbl_transform", "copy_teams_to_tags")
|
||||
if self.config.has_option("cbl_transform", "copy_locations_to_tags"):
|
||||
self.copy_locations_to_tags = self.config.getboolean("cbl_transform", "copy_locations_to_tags")
|
||||
if self.config.has_option("cbl_transform", "copy_notes_to_comments"):
|
||||
self.copy_notes_to_comments = self.config.getboolean("cbl_transform", "copy_notes_to_comments")
|
||||
if self.config.has_option("cbl_transform", "copy_storyarcs_to_tags"):
|
||||
self.copy_storyarcs_to_tags = self.config.getboolean("cbl_transform", "copy_storyarcs_to_tags")
|
||||
if self.config.has_option("cbl_transform", "copy_weblink_to_comments"):
|
||||
self.copy_weblink_to_comments = self.config.getboolean("cbl_transform", "copy_weblink_to_comments")
|
||||
if self.config.has_option("cbl_transform", "apply_cbl_transform_on_cv_import"):
|
||||
self.apply_cbl_transform_on_cv_import = self.config.getboolean(
|
||||
"cbl_transform", "apply_cbl_transform_on_cv_import"
|
||||
)
|
||||
if self.config.has_option("cbl_transform", "apply_cbl_transform_on_bulk_operation"):
|
||||
self.apply_cbl_transform_on_bulk_operation = self.config.getboolean(
|
||||
"cbl_transform", "apply_cbl_transform_on_bulk_operation"
|
||||
)
|
||||
|
||||
if self.config.has_option("rename", "rename_template"):
|
||||
self.rename_template = self.config.get("rename", "rename_template")
|
||||
if self.config.has_option("rename", "rename_issue_number_padding"):
|
||||
self.rename_issue_number_padding = self.config.getint("rename", "rename_issue_number_padding")
|
||||
if self.config.has_option("rename", "rename_use_smart_string_cleanup"):
|
||||
self.rename_use_smart_string_cleanup = self.config.getboolean("rename", "rename_use_smart_string_cleanup")
|
||||
if self.config.has_option("rename", "rename_extension_based_on_archive"):
|
||||
self.rename_extension_based_on_archive = self.config.getboolean(
|
||||
"rename", "rename_extension_based_on_archive"
|
||||
)
|
||||
if self.config.has_option("rename", "rename_dir"):
|
||||
self.rename_dir = self.config.get("rename", "rename_dir")
|
||||
if self.config.has_option("rename", "rename_move_dir"):
|
||||
self.rename_move_dir = self.config.getboolean("rename", "rename_move_dir")
|
||||
if self.config.has_option("rename", "rename_strict"):
|
||||
self.rename_strict = self.config.getboolean("rename", "rename_strict")
|
||||
|
||||
if self.config.has_option("autotag", "save_on_low_confidence"):
|
||||
self.save_on_low_confidence = self.config.getboolean("autotag", "save_on_low_confidence")
|
||||
if self.config.has_option("autotag", "dont_use_year_when_identifying"):
|
||||
self.dont_use_year_when_identifying = self.config.getboolean("autotag", "dont_use_year_when_identifying")
|
||||
if self.config.has_option("autotag", "assume_1_if_no_issue_num"):
|
||||
self.assume_1_if_no_issue_num = self.config.getboolean("autotag", "assume_1_if_no_issue_num")
|
||||
if self.config.has_option("autotag", "ignore_leading_numbers_in_filename"):
|
||||
self.ignore_leading_numbers_in_filename = self.config.getboolean(
|
||||
"autotag", "ignore_leading_numbers_in_filename"
|
||||
)
|
||||
if self.config.has_option("autotag", "remove_archive_after_successful_match"):
|
||||
self.remove_archive_after_successful_match = self.config.getboolean(
|
||||
"autotag", "remove_archive_after_successful_match"
|
||||
)
|
||||
if self.config.has_option("autotag", "wait_and_retry_on_rate_limit"):
|
||||
self.wait_and_retry_on_rate_limit = self.config.getboolean("autotag", "wait_and_retry_on_rate_limit")
|
||||
if self.config.has_option("autotag", "auto_imprint"):
|
||||
self.auto_imprint = self.config.getboolean("autotag", "auto_imprint")
|
||||
|
||||
@no_type_check
|
||||
def save(self) -> None:
|
||||
|
||||
if not self.config.has_section("settings"):
|
||||
self.config.add_section("settings")
|
||||
|
||||
self.config.set("settings", "check_for_new_version", self.check_for_new_version)
|
||||
self.config.set("settings", "rar_exe_path", self.rar_exe_path)
|
||||
self.config.set("settings", "send_usage_stats", self.send_usage_stats)
|
||||
|
||||
if not self.config.has_section("auto"):
|
||||
self.config.add_section("auto")
|
||||
|
||||
self.config.set("auto", "install_id", self.install_id)
|
||||
self.config.set("auto", "last_selected_load_data_style", self.last_selected_load_data_style)
|
||||
self.config.set("auto", "last_selected_save_data_style", self.last_selected_save_data_style)
|
||||
self.config.set("auto", "last_opened_folder", self.last_opened_folder)
|
||||
self.config.set("auto", "last_main_window_width", self.last_main_window_width)
|
||||
self.config.set("auto", "last_main_window_height", self.last_main_window_height)
|
||||
self.config.set("auto", "last_main_window_x", self.last_main_window_x)
|
||||
self.config.set("auto", "last_main_window_y", self.last_main_window_y)
|
||||
self.config.set("auto", "last_form_side_width", self.last_form_side_width)
|
||||
self.config.set("auto", "last_list_side_width", self.last_list_side_width)
|
||||
self.config.set("auto", "last_filelist_sorted_column", self.last_filelist_sorted_column)
|
||||
self.config.set("auto", "last_filelist_sorted_order", self.last_filelist_sorted_order)
|
||||
|
||||
if not self.config.has_section("identifier"):
|
||||
self.config.add_section("identifier")
|
||||
|
||||
self.config.set("identifier", "id_length_delta_thresh", self.id_length_delta_thresh)
|
||||
self.config.set("identifier", "id_publisher_filter", self.id_publisher_filter)
|
||||
|
||||
if not self.config.has_section("dialogflags"):
|
||||
self.config.add_section("dialogflags")
|
||||
|
||||
self.config.set("dialogflags", "ask_about_cbi_in_rar", self.ask_about_cbi_in_rar)
|
||||
self.config.set("dialogflags", "show_disclaimer", self.show_disclaimer)
|
||||
self.config.set("dialogflags", "dont_notify_about_this_version", self.dont_notify_about_this_version)
|
||||
self.config.set("dialogflags", "ask_about_usage_stats", self.ask_about_usage_stats)
|
||||
|
||||
if not self.config.has_section("filenameparser"):
|
||||
self.config.add_section("filenameparser")
|
||||
|
||||
self.config.set("filenameparser", "complicated_parser", self.complicated_parser)
|
||||
self.config.set("filenameparser", "remove_c2c", self.remove_c2c)
|
||||
self.config.set("filenameparser", "remove_fcbd", self.remove_fcbd)
|
||||
self.config.set("filenameparser", "remove_publisher", self.remove_publisher)
|
||||
|
||||
if not self.config.has_section("comicvine"):
|
||||
self.config.add_section("comicvine")
|
||||
|
||||
self.config.set("comicvine", "use_series_start_as_volume", self.use_series_start_as_volume)
|
||||
self.config.set("comicvine", "clear_form_before_populating_from_cv", self.clear_form_before_populating_from_cv)
|
||||
self.config.set("comicvine", "remove_html_tables", self.remove_html_tables)
|
||||
|
||||
self.config.set("comicvine", "sort_series_by_year", self.sort_series_by_year)
|
||||
self.config.set("comicvine", "exact_series_matches_first", self.exact_series_matches_first)
|
||||
self.config.set("comicvine", "always_use_publisher_filter", self.always_use_publisher_filter)
|
||||
|
||||
self.config.set("comicvine", "cv_api_key", self.cv_api_key)
|
||||
|
||||
if not self.config.has_section("cbl_transform"):
|
||||
self.config.add_section("cbl_transform")
|
||||
|
||||
self.config.set("cbl_transform", "assume_lone_credit_is_primary", self.assume_lone_credit_is_primary)
|
||||
self.config.set("cbl_transform", "copy_characters_to_tags", self.copy_characters_to_tags)
|
||||
self.config.set("cbl_transform", "copy_teams_to_tags", self.copy_teams_to_tags)
|
||||
self.config.set("cbl_transform", "copy_locations_to_tags", self.copy_locations_to_tags)
|
||||
self.config.set("cbl_transform", "copy_storyarcs_to_tags", self.copy_storyarcs_to_tags)
|
||||
self.config.set("cbl_transform", "copy_notes_to_comments", self.copy_notes_to_comments)
|
||||
self.config.set("cbl_transform", "copy_weblink_to_comments", self.copy_weblink_to_comments)
|
||||
self.config.set("cbl_transform", "apply_cbl_transform_on_cv_import", self.apply_cbl_transform_on_cv_import)
|
||||
self.config.set(
|
||||
"cbl_transform",
|
||||
"apply_cbl_transform_on_bulk_operation",
|
||||
self.apply_cbl_transform_on_bulk_operation,
|
||||
)
|
||||
|
||||
if not self.config.has_section("rename"):
|
||||
self.config.add_section("rename")
|
||||
|
||||
self.config.set("rename", "rename_template", self.rename_template)
|
||||
self.config.set("rename", "rename_issue_number_padding", self.rename_issue_number_padding)
|
||||
self.config.set("rename", "rename_use_smart_string_cleanup", self.rename_use_smart_string_cleanup)
|
||||
self.config.set("rename", "rename_extension_based_on_archive", self.rename_extension_based_on_archive)
|
||||
self.config.set("rename", "rename_dir", self.rename_dir)
|
||||
self.config.set("rename", "rename_move_dir", self.rename_move_dir)
|
||||
self.config.set("rename", "rename_strict", self.rename_strict)
|
||||
|
||||
if not self.config.has_section("autotag"):
|
||||
self.config.add_section("autotag")
|
||||
self.config.set("autotag", "save_on_low_confidence", self.save_on_low_confidence)
|
||||
self.config.set("autotag", "dont_use_year_when_identifying", self.dont_use_year_when_identifying)
|
||||
self.config.set("autotag", "assume_1_if_no_issue_num", self.assume_1_if_no_issue_num)
|
||||
self.config.set("autotag", "ignore_leading_numbers_in_filename", self.ignore_leading_numbers_in_filename)
|
||||
self.config.set("autotag", "remove_archive_after_successful_match", self.remove_archive_after_successful_match)
|
||||
self.config.set("autotag", "wait_and_retry_on_rate_limit", self.wait_and_retry_on_rate_limit)
|
||||
self.config.set("autotag", "auto_imprint", self.auto_imprint)
|
||||
|
||||
with open(self.settings_file, "w", encoding="utf-8") as configfile:
|
||||
self.config.write(configfile)
|
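As a side note, the load()/save() methods above follow the standard configparser round-trip: each read is guarded with has_option() so missing keys keep their in-memory defaults, and sections are created on demand before writing. A minimal self-contained sketch of that pattern (the file name here is illustrative, not taken from the diff):

# Illustrative sketch of the configparser pattern used in load()/save() above.
from configparser import ConfigParser

config = ConfigParser()
config.read("settings.ini")  # hypothetical settings file; missing files are simply ignored

check_for_new_version = True  # default used when the option is absent
if config.has_option("settings", "check_for_new_version"):
    check_for_new_version = config.getboolean("settings", "check_for_new_version")

if not config.has_section("settings"):
    config.add_section("settings")
# configparser stores strings, so the value is stringified before set()
config.set("settings", "check_for_new_version", str(check_for_new_version))

with open("settings.ini", "w", encoding="utf-8") as f:
    config.write(f)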
||||
comictaggerlib/settingswindow.py (new file, 379 lines)
@@ -0,0 +1,379 @@
|
||||
"""A PyQT4 dialog to enter app settings"""
|
||||
|
||||
# Copyright 2012-2014 Anthony Beville
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
from typing import Optional
|
||||
|
||||
from PyQt5 import QtCore, QtGui, QtWidgets, uic
|
||||
|
||||
from comicapi import utils
|
||||
from comicapi.genericmetadata import md_test
|
||||
from comictaggerlib.comicvinecacher import ComicVineCacher
|
||||
from comictaggerlib.comicvinetalker import ComicVineTalker
|
||||
from comictaggerlib.filerenamer import FileRenamer
|
||||
from comictaggerlib.imagefetcher import ImageFetcher
|
||||
from comictaggerlib.settings import ComicTaggerSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
windowsRarHelp = """
|
||||
<html><head/><body><p>To write to CBR/RAR archives,
|
||||
you will need to have the tools from
|
||||
<span style=" text-decoration: underline; color:#0000ff;">
|
||||
<a href="http://www.win-rar.com/download.html">WINRar</a></span>
|
||||
installed. (ComicTagger only uses the command-line rar tool,
|
||||
which is free to use.)</p></body></html>
|
||||
"""
|
||||
|
||||
linuxRarHelp = """
|
||||
<html><head/><body><p>To write to CBR/RAR archives,
|
||||
you will need to have the shareware rar tool from RARLab installed.
|
||||
Your package manager should have rar (e.g. "apt-get install rar"). If not, download it
|
||||
<span style=" text-decoration: underline; color:#0000ff;">
|
||||
<a href="https://www.rarlab.com/download.htm">here</a></span>,
|
||||
and install it in your path.</p></body></html>
|
||||
"""
|
||||
|
||||
macRarHelp = """
|
||||
<html><head/><body><p>To write to CBR/RAR archives,
|
||||
you will need the rar tool. The easiest way to get this is
|
||||
to install <span style=" text-decoration: underline; color:#0000ff;">
|
||||
<a href="https://brew.sh/">homebrew</a></span>.
|
||||
</p>Once homebrew is installed, run: <b>brew install caskroom/cask/rar</b></body></html>
|
||||
"""
|
||||
|
||||
|
||||
template_tooltip = """
|
||||
<pre>The template for the new filename. Uses python format strings https://docs.python.org/3/library/string.html#format-string-syntax
|
||||
Accepts the following variables:
|
||||
{is_empty} (boolean)
|
||||
{tag_origin} (string)
|
||||
{series} (string)
|
||||
{issue} (string)
|
||||
{title} (string)
|
||||
{publisher} (string)
|
||||
{month} (integer)
|
||||
{year} (integer)
|
||||
{day} (integer)
|
||||
{issue_count} (integer)
|
||||
{volume} (integer)
|
||||
{genre} (string)
|
||||
{language} (string)
|
||||
{comments} (string)
|
||||
{volume_count} (integer)
|
||||
{critical_rating} (string)
|
||||
{country} (string)
|
||||
{alternate_series} (string)
|
||||
{alternate_number} (string)
|
||||
{alternate_count} (integer)
|
||||
{imprint} (string)
|
||||
{notes} (string)
|
||||
{web_link} (string)
|
||||
{format} (string)
|
||||
{manga} (string)
|
||||
{black_and_white} (boolean)
|
||||
{page_count} (integer)
|
||||
{maturity_rating} (string)
|
||||
{community_rating} (string)
|
||||
{story_arc} (string)
|
||||
{series_group} (string)
|
||||
{scan_info} (string)
|
||||
{characters} (string)
|
||||
{teams} (string)
|
||||
{locations} (string)
|
||||
{credits} (list of dict({'role': string, 'person': string, 'primary': boolean}))
|
||||
{tags} (list of str)
|
||||
{pages} (list of dict({'Image': string(int), 'Type': string}))
|
||||
|
||||
CoMet-only items:
|
||||
{price} (float)
|
||||
{is_version_of} (string)
|
||||
{rights} (string)
|
||||
{identifier} (string)
|
||||
{last_mark} (string)
|
||||
{cover_image} (string)
|
||||
|
||||
Examples:
|
||||
|
||||
{series} {issue} ({year})
|
||||
Spider-Geddon 1 (2018)
|
||||
|
||||
{series} #{issue} - {title}
|
||||
Spider-Geddon #1 - New Players; Check In
|
||||
</pre>
|
||||
"""
|
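For context, a minimal illustrative sketch (not part of the diff) of how a rename template like the examples above is evaluated with Python format strings; the metadata values are invented for the example:

# Illustrative only: render a rename template with str.format_map().
metadata = {"series": "Spider-Geddon", "issue": "1", "title": "New Players; Check In", "year": 2018}

print("{series} {issue} ({year})".format_map(metadata))    # Spider-Geddon 1 (2018)
print("{series} #{issue} - {title}".format_map(metadata))  # Spider-Geddon #1 - New Players; Check In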
||||
|
||||
|
||||
class SettingsWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget, settings: ComicTaggerSettings) -> None:
|
||||
super().__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("settingswindow.ui"), self)
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowType(self.windowFlags() & ~QtCore.Qt.WindowType.WindowContextHelpButtonHint)
|
||||
)
|
||||
|
||||
self.settings = settings
|
||||
self.name = "Settings"
|
||||
|
||||
if platform.system() == "Windows":
|
||||
self.lblRarHelp.setText(windowsRarHelp)
|
||||
|
||||
elif platform.system() == "Linux":
|
||||
self.lblRarHelp.setText(linuxRarHelp)
|
||||
|
||||
elif platform.system() == "Darwin":
|
||||
self.leRarExePath.setReadOnly(False)
|
||||
|
||||
self.lblRarHelp.setText(macRarHelp)
|
||||
self.name = "Preferences"
|
||||
|
||||
self.setWindowTitle("ComicTagger " + self.name)
|
||||
self.lblDefaultSettings.setText("Revert to default " + self.name.lower())
|
||||
self.btnResetSettings.setText("Default " + self.name)
|
||||
|
||||
nldt_tip = """<html>The <b>Default Name Length Match Tolerance</b> is for eliminating automatic
|
||||
search matches that are too long compared to your series name search. The higher
|
||||
it is, the more likely you are to get a good match, but each search will take longer and
|
||||
use more bandwidth. Too low, and only the very closest lexical matches will be
|
||||
explored.</html>"""
|
||||
|
||||
self.leNameLengthDeltaThresh.setToolTip(nldt_tip)
|
||||
|
||||
pbl_tip = """<html>
|
||||
The <b>Publisher Filter</b> is for eliminating automatic matches to certain publishers
|
||||
that you know are incorrect. Useful for avoiding international reprints with the same
|
||||
covers or series names. Enter publisher names separated by commas.
|
||||
</html>"""
|
||||
self.tePublisherFilter.setToolTip(pbl_tip)
|
||||
|
||||
validator = QtGui.QIntValidator(1, 4, self)
|
||||
self.leIssueNumPadding.setValidator(validator)
|
||||
|
||||
validator = QtGui.QIntValidator(0, 99, self)
|
||||
self.leNameLengthDeltaThresh.setValidator(validator)
|
||||
|
||||
self.leRenameTemplate.setToolTip(template_tooltip)
|
||||
self.settings_to_form()
|
||||
self.rename_error: Optional[Exception] = None
|
||||
self.rename_test()
|
||||
|
||||
self.btnBrowseRar.clicked.connect(self.select_rar)
|
||||
self.btnClearCache.clicked.connect(self.clear_cache)
|
||||
self.btnResetSettings.clicked.connect(self.reset_settings)
|
||||
self.btnTestKey.clicked.connect(self.test_api_key)
|
||||
self.btnTemplateHelp.clicked.connect(self.show_template_help)
|
||||
self.leRenameTemplate.textEdited.connect(self.rename__test)
|
||||
self.cbxMoveFiles.clicked.connect(self.rename_test)
|
||||
self.cbxRenameStrict.clicked.connect(self.rename_test)
|
||||
self.leDirectory.textEdited.connect(self.rename_test)
|
||||
self.cbxComplicatedParser.clicked.connect(self.switch_parser)
|
||||
|
||||
def rename_test(self) -> None:
|
||||
self.rename__test(self.leRenameTemplate.text())
|
||||
|
||||
def rename__test(self, template: str) -> None:
|
||||
fr = FileRenamer(md_test, platform="universal" if self.cbxRenameStrict.isChecked() else "auto")
|
||||
fr.move = self.cbxMoveFiles.isChecked()
|
||||
fr.set_template(template)
|
||||
fr.set_issue_zero_padding(int(self.leIssueNumPadding.text()))
|
||||
fr.set_smart_cleanup(self.cbxSmartCleanup.isChecked())
|
||||
try:
|
||||
self.lblRenameTest.setText(fr.determine_name(".cbz"))
|
||||
self.rename_error = None
|
||||
except Exception as e:
|
||||
self.rename_error = e
|
||||
self.lblRenameTest.setText(str(e))
|
||||
|
||||
def switch_parser(self) -> None:
|
||||
complicated = self.cbxComplicatedParser.isChecked()
|
||||
|
||||
self.cbxRemoveC2C.setEnabled(complicated)
|
||||
self.cbxRemoveFCBD.setEnabled(complicated)
|
||||
self.cbxRemovePublisher.setEnabled(complicated)
|
||||
|
||||
def settings_to_form(self) -> None:
|
||||
# Copy values from settings to form
|
||||
self.leRarExePath.setText(self.settings.rar_exe_path)
|
||||
self.leNameLengthDeltaThresh.setText(str(self.settings.id_length_delta_thresh))
|
||||
self.tePublisherFilter.setPlainText(self.settings.id_publisher_filter)
|
||||
|
||||
self.cbxCheckForNewVersion.setChecked(self.settings.check_for_new_version)
|
||||
|
||||
self.cbxComplicatedParser.setChecked(self.settings.complicated_parser)
|
||||
self.cbxRemoveC2C.setChecked(self.settings.remove_c2c)
|
||||
self.cbxRemoveFCBD.setChecked(self.settings.remove_fcbd)
|
||||
self.cbxRemovePublisher.setChecked(self.settings.remove_publisher)
|
||||
self.switch_parser()
|
||||
|
||||
self.cbxUseSeriesStartAsVolume.setChecked(self.settings.use_series_start_as_volume)
|
||||
self.cbxClearFormBeforePopulating.setChecked(self.settings.clear_form_before_populating_from_cv)
|
||||
self.cbxRemoveHtmlTables.setChecked(self.settings.remove_html_tables)
|
||||
|
||||
self.cbxUseFilter.setChecked(self.settings.always_use_publisher_filter)
|
||||
self.cbxSortByYear.setChecked(self.settings.sort_series_by_year)
|
||||
self.cbxExactMatches.setChecked(self.settings.exact_series_matches_first)
|
||||
|
||||
self.leKey.setText(str(self.settings.cv_api_key))
|
||||
|
||||
self.cbxAssumeLoneCreditIsPrimary.setChecked(self.settings.assume_lone_credit_is_primary)
|
||||
self.cbxCopyCharactersToTags.setChecked(self.settings.copy_characters_to_tags)
|
||||
self.cbxCopyTeamsToTags.setChecked(self.settings.copy_teams_to_tags)
|
||||
self.cbxCopyLocationsToTags.setChecked(self.settings.copy_locations_to_tags)
|
||||
self.cbxCopyStoryArcsToTags.setChecked(self.settings.copy_storyarcs_to_tags)
|
||||
self.cbxCopyNotesToComments.setChecked(self.settings.copy_notes_to_comments)
|
||||
self.cbxCopyWebLinkToComments.setChecked(self.settings.copy_weblink_to_comments)
|
||||
self.cbxApplyCBLTransformOnCVIMport.setChecked(self.settings.apply_cbl_transform_on_cv_import)
|
||||
self.cbxApplyCBLTransformOnBatchOperation.setChecked(self.settings.apply_cbl_transform_on_bulk_operation)
|
||||
|
||||
self.leRenameTemplate.setText(self.settings.rename_template)
|
||||
self.leIssueNumPadding.setText(str(self.settings.rename_issue_number_padding))
|
||||
self.cbxSmartCleanup.setChecked(self.settings.rename_use_smart_string_cleanup)
|
||||
self.cbxChangeExtension.setChecked(self.settings.rename_extension_based_on_archive)
|
||||
self.cbxMoveFiles.setChecked(self.settings.rename_move_dir)
|
||||
self.leDirectory.setText(self.settings.rename_dir)
|
||||
self.cbxRenameStrict.setChecked(self.settings.rename_strict)
|
||||
|
||||
def accept(self) -> None:
|
||||
self.rename_test()
|
||||
if self.rename_error is not None:
|
||||
QtWidgets.QMessageBox.critical(
|
||||
self,
|
||||
"Invalid format string!",
|
||||
"Your rename template is invalid!"
|
||||
f"<br/><br/>{self.rename_error}<br/><br/>"
|
||||
"Please consult the template help in the "
|
||||
"settings and the documentation on the format at "
|
||||
"<a href='https://docs.python.org/3/library/string.html#format-string-syntax'>"
|
||||
"https://docs.python.org/3/library/string.html#format-string-syntax</a>",
|
||||
)
|
||||
return
|
||||
|
||||
# Copy values from form to settings and save
|
||||
self.settings.rar_exe_path = str(self.leRarExePath.text())
|
||||
|
||||
# make sure rar program is now in the path for the rar class
|
||||
if self.settings.rar_exe_path:
|
||||
utils.add_to_path(os.path.dirname(self.settings.rar_exe_path))
|
||||
|
||||
if not str(self.leNameLengthDeltaThresh.text()).isdigit():
|
||||
self.leNameLengthDeltaThresh.setText("0")
|
||||
|
||||
if not str(self.leIssueNumPadding.text()).isdigit():
|
||||
self.leIssueNumPadding.setText("0")
|
||||
|
||||
self.settings.check_for_new_version = self.cbxCheckForNewVersion.isChecked()
|
||||
|
||||
self.settings.id_length_delta_thresh = int(self.leNameLengthDeltaThresh.text())
|
||||
self.settings.id_publisher_filter = str(self.tePublisherFilter.toPlainText())
|
||||
|
||||
self.settings.complicated_parser = self.cbxComplicatedParser.isChecked()
|
||||
self.settings.remove_c2c = self.cbxRemoveC2C.isChecked()
|
||||
self.settings.remove_fcbd = self.cbxRemoveFCBD.isChecked()
|
||||
self.settings.remove_publisher = self.cbxRemovePublisher.isChecked()
|
||||
|
||||
self.settings.use_series_start_as_volume = self.cbxUseSeriesStartAsVolume.isChecked()
|
||||
self.settings.clear_form_before_populating_from_cv = self.cbxClearFormBeforePopulating.isChecked()
|
||||
self.settings.remove_html_tables = self.cbxRemoveHtmlTables.isChecked()
|
||||
|
||||
self.settings.always_use_publisher_filter = self.cbxUseFilter.isChecked()
|
||||
self.settings.sort_series_by_year = self.cbxSortByYear.isChecked()
|
||||
self.settings.exact_series_matches_first = self.cbxExactMatches.isChecked()
|
||||
|
||||
self.settings.cv_api_key = str(self.leKey.text())
|
||||
ComicVineTalker.api_key = self.settings.cv_api_key.strip()
|
||||
self.settings.assume_lone_credit_is_primary = self.cbxAssumeLoneCreditIsPrimary.isChecked()
|
||||
self.settings.copy_characters_to_tags = self.cbxCopyCharactersToTags.isChecked()
|
||||
self.settings.copy_teams_to_tags = self.cbxCopyTeamsToTags.isChecked()
|
||||
self.settings.copy_locations_to_tags = self.cbxCopyLocationsToTags.isChecked()
|
||||
self.settings.copy_storyarcs_to_tags = self.cbxCopyStoryArcsToTags.isChecked()
|
||||
self.settings.copy_notes_to_comments = self.cbxCopyNotesToComments.isChecked()
|
||||
self.settings.copy_weblink_to_comments = self.cbxCopyWebLinkToComments.isChecked()
|
||||
self.settings.apply_cbl_transform_on_cv_import = self.cbxApplyCBLTransformOnCVIMport.isChecked()
|
||||
self.settings.apply_cbl_transform_on_bulk_operation = self.cbxApplyCBLTransformOnBatchOperation.isChecked()
|
||||
|
||||
self.settings.rename_template = str(self.leRenameTemplate.text())
|
||||
self.settings.rename_issue_number_padding = int(self.leIssueNumPadding.text())
|
||||
self.settings.rename_use_smart_string_cleanup = self.cbxSmartCleanup.isChecked()
|
||||
self.settings.rename_extension_based_on_archive = self.cbxChangeExtension.isChecked()
|
||||
self.settings.rename_move_dir = self.cbxMoveFiles.isChecked()
|
||||
self.settings.rename_dir = self.leDirectory.text()
|
||||
|
||||
self.settings.rename_strict = self.cbxRenameStrict.isChecked()
|
||||
|
||||
self.settings.save()
|
||||
QtWidgets.QDialog.accept(self)
|
||||
|
||||
def select_rar(self) -> None:
|
||||
self.select_file(self.leRarExePath, "RAR")
|
||||
|
||||
def clear_cache(self) -> None:
|
||||
ImageFetcher().clear_cache()
|
||||
ComicVineCacher().clear_cache()
|
||||
QtWidgets.QMessageBox.information(self, self.name, "Cache has been cleared.")
|
||||
|
||||
def test_api_key(self) -> None:
|
||||
if ComicVineTalker().test_key(str(self.leKey.text()).strip()):
|
||||
QtWidgets.QMessageBox.information(self, "API Key Test", "Key is valid!")
|
||||
else:
|
||||
QtWidgets.QMessageBox.warning(self, "API Key Test", "Key is NOT valid.")
|
||||
|
||||
def reset_settings(self) -> None:
|
||||
self.settings.reset()
|
||||
self.settings_to_form()
|
||||
QtWidgets.QMessageBox.information(self, self.name, self.name + " have been returned to default values.")
|
||||
|
||||
def select_file(self, control: QtWidgets.QLineEdit, name: str) -> None:
|
||||
|
||||
dialog = QtWidgets.QFileDialog(self)
|
||||
dialog.setFileMode(QtWidgets.QFileDialog.FileMode.ExistingFile)
|
||||
|
||||
if platform.system() == "Windows":
|
||||
if name == "RAR":
|
||||
flt = "Rar Program (Rar.exe)"
|
||||
else:
|
||||
flt = "Libraries (*.dll)"
|
||||
dialog.setNameFilter(flt)
|
||||
else:
|
||||
dialog.setFilter(QtCore.QDir.Filter.Files)
|
||||
|
||||
dialog.setDirectory(os.path.dirname(str(control.text())))
|
||||
if name == "RAR":
|
||||
dialog.setWindowTitle("Find " + name + " program")
|
||||
else:
|
||||
dialog.setWindowTitle("Find " + name + " library")
|
||||
|
||||
if dialog.exec():
|
||||
file_list = dialog.selectedFiles()
|
||||
control.setText(str(file_list[0]))
|
||||
|
||||
def show_rename_tab(self) -> None:
|
||||
self.tabWidget.setCurrentIndex(5)
|
||||
|
||||
def show_template_help(self) -> None:
|
||||
template_help_win = TemplateHelpWindow(self)
|
||||
template_help_win.setModal(False)
|
||||
template_help_win.show()
|
||||
|
||||
|
||||
class TemplateHelpWindow(QtWidgets.QDialog):
|
||||
def __init__(self, parent: QtWidgets.QWidget) -> None:
|
||||
super(TemplateHelpWindow, self).__init__(parent)
|
||||
|
||||
uic.loadUi(ComicTaggerSettings.get_ui_file("TemplateHelp.ui"), self)
|
||||
comictaggerlib/taggerwindow.py (new file, 2155 lines)
comictaggerlib/ui/TemplateHelp.ui (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>Dialog</class>
|
||||
<widget class="QDialog" name="Dialog">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>702</width>
|
||||
<height>452</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Template Help</string>
|
||||
</property>
|
||||
<property name="sizeGripEnabled">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
<layout class="QVBoxLayout" name="verticalLayout">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="leftMargin">
|
||||
<number>2</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>2</number>
|
||||
</property>
|
||||
<item>
|
||||
<widget class="QTextBrowser" name="textEdit">
|
||||
<property name="readOnly">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
<property name="html">
|
||||
<string><html>
|
||||
<head>
|
||||
<style>
|
||||
table {
|
||||
font-family: arial, sans-serif;
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
td, th {
|
||||
border: 1px solid #dddddd;
|
||||
text-align: left;
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
tr:nth-child(even) {
|
||||
background-color: #dddddd;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1 style="text-align: center">Template help</h1>
|
||||
<p>The template uses Python format strings; in the simplest use it replaces the field (e.g. {issue}) with the value for that particular comic (e.g. 1). For advanced formatting, please reference the
|
||||
|
||||
<a href="https://docs.python.org/3/library/string.html#format-string-syntax">Python 3 documentation</a></p>
|
||||
Accepts the following variables:
|
||||
<table>
|
||||
<tr>
|
||||
<th>Tag name</th>
|
||||
<th>Type</th>
|
||||
</tr>
|
||||
<tr><td>{is_empty}</td><td>boolean</td></tr>
|
||||
<tr><td>{tag_origin}</td><td>string</td></tr>
|
||||
<tr><td>{series}</td><td>string</td></tr>
|
||||
<tr><td>{issue}</td><td>string</td></tr>
|
||||
<tr><td>{title}</td><td>string</td></tr>
|
||||
<tr><td>{publisher}</td><td>string</td></tr>
|
||||
<tr><td>{month}</td><td>integer</td></tr>
|
||||
<tr><td>{year}</td><td>integer</td></tr>
|
||||
<tr><td>{day}</td><td>integer</td></tr>
|
||||
<tr><td>{issue_count}</td><td>integer</td></tr>
|
||||
<tr><td>{volume}</td><td>integer</td></tr>
|
||||
<tr><td>{genre}</td><td>string</td></tr>
|
||||
<tr><td>{language}</td><td>string</td></tr>
|
||||
<tr><td>{comments}</td><td>string</td></tr>
|
||||
<tr><td>{volume_count}</td><td>integer</td></tr>
|
||||
<tr><td>{critical_rating}</td><td>string</td></tr>
|
||||
<tr><td>{country}</td><td>string</td></tr>
|
||||
<tr><td>{alternate_series}</td><td>string</td></tr>
|
||||
<tr><td>{alternate_number}</td><td>string</td></tr>
|
||||
<tr><td>{alternate_count}</td><td>integer</td></tr>
|
||||
<tr><td>{imprint}</td><td>string</td></tr>
|
||||
<tr><td>{notes}</td><td>string</td></tr>
|
||||
<tr><td>{web_link}</td><td>string</td></tr>
|
||||
<tr><td>{format}</td><td>string</td></tr>
|
||||
<tr><td>{manga}</td><td>string</td></tr>
|
||||
<tr><td>{black_and_white}</td><td>boolean</td></tr>
|
||||
<tr><td>{page_count}</td><td>integer</td></tr>
|
||||
<tr><td>{maturity_rating}</td><td>string</td></tr>
|
||||
<tr><td>{community_rating}</td><td>string</td></tr>
|
||||
<tr><td>{story_arc}</td><td>string</td></tr>
|
||||
<tr><td>{series_group}</td><td>string</td></tr>
|
||||
<tr><td>{scan_info}</td><td>string</td></tr>
|
||||
<tr><td>{characters}</td><td>string</td></tr>
|
||||
<tr><td>{teams}</td><td>string</td></tr>
|
||||
<tr><td>{locations}</td><td>string</td></tr>
|
||||
<tr><td>{credits}</td><td>list of dict({'role': string, 'person': string, 'primary': boolean})</td></tr>
|
||||
<tr><td>{tags}</td><td>list of str</td></tr>
|
||||
<tr><td>{pages}</td><td>list of dict({'Image': string(int), 'Type': string})</td></tr>
|
||||
<tr><td>{price}</td><td>float</td></tr>
|
||||
<tr><td>{is_version_of}</td><td>string</td></tr>
|
||||
<tr><td>{rights}</td><td>string</td></tr>
|
||||
<tr><td>{identifier}</td><td>string</td></tr>
|
||||
<tr><td>{last_mark}</td><td>string</td></tr>
|
||||
<tr><td>{cover_image}</td><td>string</td></tr>
|
||||
</table>
|
||||
<pre>
|
||||
Examples:
|
||||
|
||||
{series} {issue} ({year})
|
||||
Spider-Geddon 1 (2018)
|
||||
|
||||
{series} #{issue} - {title}
|
||||
Spider-Geddon #1 - New Players; Check In
|
||||
|
||||
</pre>
|
||||
</body>
|
||||
</html></string></property>
|
||||
<property name="textInteractionFlags">
|
||||
<set>Qt::TextBrowserInteraction</set>
|
||||
</property>
|
||||
<property name="openExternalLinks">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections/>
|
||||
</ui>
|
||||
comictaggerlib/ui/__init__.py (new file, 0 lines)
comictaggerlib/ui/autotagmatchwindow.ui (new file, 174 lines)
@@ -0,0 +1,174 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>dialogMatchSelect</class>
|
||||
<widget class="QDialog" name="dialogMatchSelect">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>943</width>
|
||||
<height>467</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Select Match</string>
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout">
|
||||
<item row="0" column="0">
|
||||
<layout class="QVBoxLayout" name="verticalLayout">
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout">
|
||||
<item>
|
||||
<widget class="QWidget" name="archiveCoverContainer" native="true">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>350</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>350</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QSplitter" name="splitter">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Expanding" vsizetype="Expanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Vertical</enum>
|
||||
</property>
|
||||
<property name="childrenCollapsible">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
<widget class="QTableWidget" name="twList">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Expanding" vsizetype="Expanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>7</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="selectionMode">
|
||||
<enum>QAbstractItemView::SingleSelection</enum>
|
||||
</property>
|
||||
<property name="selectionBehavior">
|
||||
<enum>QAbstractItemView::SelectRows</enum>
|
||||
</property>
|
||||
<property name="rowCount">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="columnCount">
|
||||
<number>4</number>
|
||||
</property>
|
||||
<attribute name="horizontalHeaderStretchLastSection">
|
||||
<bool>true</bool>
|
||||
</attribute>
|
||||
<attribute name="verticalHeaderVisible">
|
||||
<bool>false</bool>
|
||||
</attribute>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Series</string>
|
||||
</property>
|
||||
</column>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Publisher</string>
|
||||
</property>
|
||||
</column>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Date</string>
|
||||
</property>
|
||||
</column>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Title</string>
|
||||
</property>
|
||||
</column>
|
||||
</widget>
|
||||
<widget class="QTextEdit" name="teDescription">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Expanding" vsizetype="Expanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>3</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
</widget>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QWidget" name="altCoverContainer" native="true">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>350</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>200</width>
|
||||
<height>350</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>accepted()</signal>
|
||||
<receiver>dialogMatchSelect</receiver>
|
||||
<slot>accept()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>248</x>
|
||||
<y>254</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>157</x>
|
||||
<y>274</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>rejected()</signal>
|
||||
<receiver>dialogMatchSelect</receiver>
|
||||
<slot>reject()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>316</x>
|
||||
<y>260</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>286</x>
|
||||
<y>274</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
</connections>
|
||||
</ui>
|
||||
@@ -21,6 +21,44 @@
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout">
|
||||
<item row="0" column="1">
|
||||
<widget class="QWidget" name="archiveCoverContainer" native="true">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Expanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="0" column="4">
|
||||
<widget class="QWidget" name="testCoverContainer" native="true">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="0" column="2">
|
||||
<layout class="QVBoxLayout" name="verticalLayout">
|
||||
<item>
|
||||
<widget class="QProgressBar" name="progressBar">
|
||||
@@ -66,50 +104,6 @@
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item row="0" column="0">
|
||||
<widget class="QLabel" name="lblArchive">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Expanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>TextLabel</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="0" column="2">
|
||||
<widget class="QLabel" name="lblTest">
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>110</width>
|
||||
<height>165</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>TextLabel</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
comictaggerlib/ui/autotagstartwindow.ui (new file, 272 lines)
@@ -0,0 +1,272 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>dialogExport</class>
|
||||
<widget class="QDialog" name="dialogExport">
|
||||
<property name="windowModality">
|
||||
<enum>Qt::NonModal</enum>
|
||||
</property>
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>519</width>
|
||||
<height>440</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="MinimumExpanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Auto-Tag</string>
|
||||
</property>
|
||||
<property name="modal">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout_2">
|
||||
<item row="0" column="0">
|
||||
<widget class="QLabel" name="label">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="wordWrap">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="1" column="0">
|
||||
<layout class="QGridLayout" name="gridLayout">
|
||||
<item row="9" column="0">
|
||||
<widget class="QCheckBox" name="cbxSpecifySearchString">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Specify series search string for all selected archives:</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="0" column="0">
|
||||
<widget class="QCheckBox" name="cbxSaveOnLowConfidence">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Save on low confidence match</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="4" column="0">
|
||||
<widget class="QCheckBox" name="cbxIgnoreLeadingDigitsInFilename">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Ignore leading (sequence) numbers in filename</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="5" column="0">
|
||||
<widget class="QCheckBox" name="cbxRemoveAfterSuccess">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Remove archives from list after successful tagging</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="3" column="0">
|
||||
<widget class="QCheckBox" name="cbxWaitForRateLimit">
|
||||
<property name="text">
|
||||
<string>Wait and retry when Comic Vine rate limit is exceeded (experimental)</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="1" column="0">
|
||||
<widget class="QCheckBox" name="cbxDontUseYear">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Don't use publication year in identification process</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="2" column="0">
|
||||
<widget class="QCheckBox" name="cbxAssumeIssueOne">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>If no issue number, assume "1"</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="6" column="0">
|
||||
<widget class="QCheckBox" name="cbxAutoImprint">
|
||||
<property name="toolTip">
|
||||
<string>Checks the publisher against a list of imprints.</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Auto Imprint</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="7" column="0">
|
||||
<widget class="QCheckBox" name="cbxRemoveMetadata">
|
||||
<property name="toolTip">
|
||||
<string>Removes existing metadata before applying retrieved metadata</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Overwrite metadata</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="8" column="0">
|
||||
<widget class="QCheckBox" name="cbxSplitWords">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Split words in filenames (e.g. 'judgedredd' to 'judge dredd') (Experimental)</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="12" column="0">
|
||||
<widget class="QLineEdit" name="leNameLengthMatchTolerance">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Expanding" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>50</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="10" column="0">
|
||||
<widget class="QLineEdit" name="leSearchString">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Expanding" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="11" column="0">
|
||||
<widget class="QLabel" name="label_3">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Adjust Name Length Match Tolerance:</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item row="2" column="0">
|
||||
<widget class="QLabel" name="label_2">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item row="4" column="0">
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>accepted()</signal>
|
||||
<receiver>dialogExport</receiver>
|
||||
<slot>accept()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>346</x>
|
||||
<y>187</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>277</x>
|
||||
<y>104</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
<connection>
|
||||
<sender>buttonBox</sender>
|
||||
<signal>rejected()</signal>
|
||||
<receiver>dialogExport</receiver>
|
||||
<slot>reject()</slot>
|
||||
<hints>
|
||||
<hint type="sourcelabel">
|
||||
<x>346</x>
|
||||
<y>187</y>
|
||||
</hint>
|
||||
<hint type="destinationlabel">
|
||||
<x>277</x>
|
||||
<y>104</y>
|
||||
</hint>
|
||||
</hints>
|
||||
</connection>
|
||||
</connections>
|
||||
</ui>
|
||||
comictaggerlib/ui/coverimagewidget.ui (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>coverImageWidget</class>
|
||||
<widget class="QWidget" name="coverImageWidget">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>292</width>
|
||||
<height>353</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Form</string>
|
||||
</property>
|
||||
<layout class="QGridLayout" name="gridLayout">
|
||||
<property name="leftMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="horizontalSpacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="verticalSpacing">
|
||||
<number>4</number>
|
||||
</property>
|
||||
<item row="1" column="0">
|
||||
<layout class="QHBoxLayout" name="horizontalLayout">
|
||||
<property name="spacing">
|
||||
<number>2</number>
|
||||
</property>
|
||||
<item>
|
||||
<widget class="QPushButton" name="btnLeft">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>30</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QLabel" name="label">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="MinimumExpanding" vsizetype="Preferred">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="alignment">
|
||||
<set>Qt::AlignCenter</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QPushButton" name="btnRight">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>30</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item row="0" column="0">
|
||||
<widget class="QFrame" name="frame">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Expanding" vsizetype="Expanding">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="frameShape">
|
||||
<enum>QFrame::NoFrame</enum>
|
||||
</property>
|
||||
<property name="frameShadow">
|
||||
<enum>QFrame::Raised</enum>
|
||||
</property>
|
||||
<widget class="QLabel" name="lblImage">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>60</x>
|
||||
<y>50</y>
|
||||
<width>91</width>
|
||||
<height>61</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="toolTip">
|
||||
<string>Double-click to expand</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>TextLabel</string>
|
||||
</property>
|
||||
</widget>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections/>
|
||||
</ui>
|
||||