Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-09 03:04:10 -04:00

Commit eff34542e5: Sync to trunk.
@@ -45,9 +45,10 @@ def freeze():
             '/usr/lib/libpoppler.so.4',
             '/usr/lib/libpoppler-qt4.so.3',
             '/usr/lib/libxml2.so.2',
-            '/usr/lib/libdbus-1.so.3',
             '/usr/lib/libopenjpeg.so.2',
             '/usr/lib/libxslt.so.1',
+            '/usr/lib64/libjpeg.so.7'.replace('64', '64' if is64bit
+                else ''),
             '/usr/lib/libxslt.so.1',
             '/usr/lib/libgthread-2.0.so.0',
             '/usr/lib/gcc/***-pc-linux-gnu/4.4.1/libstdc++.so.6'.replace('***',
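The new conditional entry above resolves to an architecture-appropriate library path. A minimal sketch of how it evaluates (is64bit is assumed to be a boolean defined elsewhere in this script):

    is64bit = False  # assumed to be set elsewhere in the freeze script
    path = '/usr/lib64/libjpeg.so.7'.replace('64', '64' if is64bit else '')
    # is64bit == True  -> '/usr/lib64/libjpeg.so.7'
    # is64bit == False -> '/usr/lib/libjpeg.so.7'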
@@ -232,10 +232,12 @@ test
 }
 FileGroup ::BEF8D398-58BA-1F66-39D6-D4A63D5BEEF9 -setup Install -active Yes -platforms {AIX-ppc FreeBSD-4-x86 FreeBSD-x86 HPUX-hppa Linux-x86 Solaris-sparc Windows TarArchive ZipArchive FreeBSD-5-x86 FreeBSD-6-x86 FreeBSD-7-x86 Linux-x86_64 Solaris-x86} -name {Program Files} -parent FileGroups
 File ::8E5D85A4-7608-47A1-CF7C-309060D5FF40 -filemethod {Always overwrite files} -type dir -directory <%InstallDir%> -name /home/kovid/work/calibre/build/py2exe -parent BEF8D398-58BA-1F66-39D6-D4A63D5BEEF9
+File ::FC870EE7-667B-481F-113B-B4504DFCCFA5 -type dir -name bin -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::377C588B-B324-CA09-ED49-4DB5F82A15ED -type dir -name etc -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::55DE4B9F-0881-FF51-E2BA-EC72B5D3425C -type dir -name fonts -parent 377C588B-B324-CA09-ED49-4DB5F82A15ED
 File ::A27B68D9-43A6-B994-3091-E829AFBA340D -type dir -name conf.d -parent 55DE4B9F-0881-FF51-E2BA-EC72B5D3425C
 File ::974ADD48-88E5-BC7A-1963-928A245F133A -type dir -name conf.avail -parent 55DE4B9F-0881-FF51-E2BA-EC72B5D3425C
+File ::5E5273D8-3423-8DC8-83C4-BE000069A803 -name fonts.dtd -parent 55DE4B9F-0881-FF51-E2BA-EC72B5D3425C
 File ::32D7DBE0-E0B1-5BDD-66C5-2A13D8BC8F90 -name fonts.conf -parent 55DE4B9F-0881-FF51-E2BA-EC72B5D3425C
 File ::B95D03D4-EA59-F00E-59E1-BA05758879DA -type dir -name imageformats -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::A624029D-AE0F-49A5-4DAC-7720CDCAB271 -name qmng4.dll -parent B95D03D4-EA59-F00E-59E1-BA05758879DA
@@ -335,6 +337,7 @@ File ::2F90282D-B59F-B6BA-090B-45858AF7F3B2 -name IM_MOD_RL_clipboard_.dll -pare
 File ::B512D139-B295-D7C3-F0B4-43775849CF58 -name numpy.core._sort.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::A2C063AC-2F12-9260-501A-0E8BD0B8A932 -name calibre.exe.local -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::439B6D05-7DE6-061D-4BCC-3F04F4FA2FA2 -name IM_MOD_RL_png_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::BA464D11-BBCE-DEDA-C354-0C7BE60FAA05 -name IM_MOD_RL_braille_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::8F9FF823-AF6D-A288-8AE6-7D74F55DCE29 -name CORE_RL_bzlib_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::57A3F5D5-BFC8-CB38-5A57-548EE0DB033B -name QtNetwork4.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::4DB7E8DE-905A-822A-AF14-17BD5ACEF915 -name IM_MOD_RL_wmf_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
@@ -358,6 +361,7 @@ File ::A6419A84-6C22-784E-6D84-D09972770770 -name unicodedata.pyd -parent 8E5D85
 File ::E658FBE0-5860-D041-12D3-76ADD18F804B -name servicemanager.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::C98A6FC4-E341-7FD4-005C-DA2B384E11D8 -name win32api.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::ADA36EEA-7DE1-447C-B1AB-A4908E65E2CD -name IM_MOD_RL_ipl_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::53C2EC15-850F-8F49-6425-C228FB6E6D0E -name libfontconfig-1.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::EDE6F457-C83F-C5FA-9AF4-38FDFF17D929 -name PIL._imagingtk.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::09D0906E-3611-3DB7-32CF-A140585694A7 -name win32pdh.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::4C84F0DC-7157-0C90-2062-180139B03E25 -name IM_MOD_RL_rgb_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
@@ -380,14 +384,17 @@ File ::404A98F1-84FD-B6D0-B130-354EECD9253C -name IM_MOD_RL_emf_.dll -parent 8E5
 File ::17034C34-403E-B405-99C1-F80B7F00E27C -name log.xml -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::34E63A2C-65C5-0A84-ACF1-BD6A844D4579 -name pythoncom26.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::2F20484B-53B8-B08E-B691-C5B2D49A9CB4 -name QtWebKit4.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::8AF134C8-9189-3F9A-A081-9143FFD44C45 -name freetype6.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::E8A4442D-D0D3-31CD-997A-3CEB641CF5B7 -name IM_MOD_RL_mtv_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::0CA87D0B-5A04-1439-AEE8-C97072D47BA7 -name CORE_RL_tiff_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::AC24F520-88D4-D1CF-5797-27C715CE8ACA -name pyexpat.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::16848F38-71CD-55B8-4D96-1537F6773744 -name IM_MOD_RL_dps_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::33A46CC5-BAC4-5863-C83D-303DCCA0CAA1 -name tk85.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::81116DD3-1715-AA87-472F-544FC616EDAF -name IM_MOD_RL_dcm_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::335A4CFB-5C2D-44E4-C438-7018E8244C3D -name ebook-viewer.exe -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::62A16C3B-ED9C-5187-2807-58857DF3A990 -name calibre-debug.exe -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::55ECA7B7-279A-F51D-81C2-C8DC44CF0E22 -name select.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::A6AF5ECC-A981-4CBD-DBEE-303A9340C603 -name IM_MOD_RL_xps_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::5BCBF71F-18E7-5C52-E3F5-7D7F3028AD46 -name locale.xml -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::5C9FA94C-B8B0-A94B-548D-1D24FDEA5770 -name CORE_RL_wand_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::E39E60BE-DE77-AB8C-42C6-5A7D7DC073E3 -name IM_MOD_RL_ttf_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
@@ -436,10 +443,12 @@ File ::396B4F78-FB45-C0B2-ACB3-97769CF5CD5D -name msvcr90.dll -parent 8E5D85A4-7
 File ::1DE767EE-4891-4E54-422D-67A4DFF8C3B5 -name lrfviewer.exe -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::AFFEC28A-615C-E3E6-0026-CCE2594A6D25 -name calibre-server.exe.local -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::2C64F86B-9366-B52D-F7B2-5BBD51F6982A -name IM_MOD_RL_pwp_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::F4B2EF9C-EB18-B865-6E99-75CFB9B60D87 -name IM_MOD_RL_dds_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::38770D87-6CA9-9E3E-FBA1-A8CCFCD88FB5 -name IM_MOD_RL_fpx_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::BE2D7BC3-D294-AF3F-65E7-3B372DEFDE36 -name PIL._imaging.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::200B9AEC-809F-75B7-DC12-A51BFC2A6F93 -name PyQt4.QtSvg.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::52132A31-D3AE-C617-7568-BF2AF46B5D74 -name IM_MOD_RL_pcl_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::F94472C3-C8D0-950F-5ED9-1611D1CE30E5 -name IM_MOD_RL_inline_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::055ADB4B-20C5-E071-442F-4DA0A8D6F3C5 -name english.xml -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::B10B6B91-0C03-642D-90D8-37B607B164AD -name IM_MOD_RL_wpg_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::BFBB144B-1794-8304-9772-F103A42F2CA4 -name IM_MOD_RL_pdb_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
@@ -494,6 +503,7 @@ File ::9BA85EE5-1754-67AF-736D-481CDCC72DD2 -name _imagingft.pyd -parent 8E5D85A
 File ::6254DD0C-8F2C-D4AE-2107-2597D542C181 -name IM_MOD_RL_matte_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::F159D566-88D6-C347-3E3C-55C2DDFC5FD0 -name IM_MOD_RL_mono_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::B873CAA2-011F-94C3-7977-FF344E53C44F -name CORE_RL_jbig_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
+File ::7004FCB8-C6F4-C7AF-08E4-B6151B2F7050 -name tcl85.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::6921F62A-4015-4C9F-98A6-BCBBC43B698E -name msvcm90.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::7276E0CA-C205-4B18-19A3-157F1B8523FB -name IM_MOD_RL_xtrn_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::7B9624A9-88B4-C61E-6771-9A34FB6CA3B5 -name PyQt4.QtGui.pyd -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
@@ -557,15 +567,6 @@ File ::9E4E5E8F-30C0-E631-9516-2AE01A5CA0E9 -name ebook-device.exe.local -parent
 File ::7BE6B538-70D5-A7EB-5F91-E14CE57B394B -name calibre-complete.exe.local -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::C4E40030-3EE0-8B05-E6B9-89E81433EE1F -name phonon4.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 File ::9E84342F-36ED-7ED3-8F90-1EC55267BCFC -name poppler-qt4.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::86BA442C-90C9-A4E6-1D3E-D144E5F326C1 -name msvcp71.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::11FBAD0B-A2DB-C28A-85B8-D6A22706864F -name mfc71.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::4B9FB3E6-B807-65CC-826D-A398E964D00C -name IM_MOD_RL_hdf_.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::3E201C0C-C7CC-5785-74F6-A6CC7F50A15A -name msvcr71.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::2EE42149-1C12-CCA9-9089-AE1809098D0A -name jpeg62.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::B1FD37B4-E91B-DC1C-1C69-FB2E10EB93AE -name libtiff3.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::15E09D95-97D6-92A9-CC4D-120885E4DDAD -name freetype.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::D954BC75-8166-EC1B-D91B-C9779248AA14 -name fontconfig.dll -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
-File ::1F3C052A-A5E0-5C65-8D42-EBF44FBE138D -name podofo.dll.manifest -parent 8E5D85A4-7608-47A1-CF7C-309060D5FF40
 Component ::F6829AB7-9F66-4CEE-CA0E-21F54C6D3609 -setup Install -active Yes -platforms {AIX-ppc FreeBSD-4-x86 FreeBSD-x86 HPUX-hppa Linux-x86 Solaris-sparc Windows FreeBSD-5-x86 FreeBSD-6-x86 FreeBSD-7-x86 Linux-x86_64 Solaris-x86} -name Main -parent Components
 SetupType ::D9ADE41C-B744-690C-2CED-CF826BF03D2E -setup Install -active Yes -platforms {AIX-ppc FreeBSD-4-x86 FreeBSD-x86 HPUX-hppa Linux-x86 Solaris-sparc Windows FreeBSD-5-x86 FreeBSD-6-x86 FreeBSD-7-x86 Linux-x86_64 Solaris-x86} -name Typical -parent SetupTypes

@@ -7,12 +7,15 @@ __docformat__ = 'restructuredtext en'
 Freeze app into executable using py2exe.
 '''
 QT_DIR = 'C:\\Qt\\4.5.2'
-LIBUSB_DIR = r'C:\cygwin\home\kovid\win32\libusb'
+LIBUSB_DIR = 'C:\\libusb'
 LIBUNRAR = 'C:\\Program Files\\UnrarDLL\\unrar.dll'
-BINARIES = r'C:\cygwin\home\kovid\win32\bin'
-IMAGEMAGICK_DIR = r'C:\cygwin\home\kovid\win32\imagemagick'
-FONTCONFIG_DIR = r'C:\cygwin\home\kovid\win32\etc'
-VC90 = r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT'
+PDFTOHTML = 'C:\\cygwin\\home\\kovid\\poppler-0.10.6\\rel\\pdftohtml.exe'
+POPPLER = 'C:\\cygwin\\home\\kovid\\poppler'
+IMAGEMAGICK_DIR = 'C:\\ImageMagick'
+PDFTK = 'C:\\pdftk.exe'
+PODOFO = 'C:\\podofo'
+FONTCONFIG_DIR = 'C:\\fontconfig'
+VC90 = r'C:\VC90.CRT'

 # ModuleFinder can't handle runtime changes to __path__, but win32com uses them
 import sys
@@ -95,17 +98,25 @@ class BuildEXE(py2exe.build_exe.py2exe):
             shutil.copyfile(f, os.path.join(tdir, os.path.basename(f)))
         print '\tAdding unrar'
         shutil.copyfile(LIBUNRAR, os.path.join(PY2EXE_DIR, os.path.basename(LIBUNRAR)))
-        print '\tAdding Binaries'
-        for x in glob.glob(os.path.join(BINARIES, '*.dll')) + \
-                [os.path.join(BINARIES, 'pdftohtml.exe')] + \
-                glob.glob(os.path.join(BINARIES, '*.manifest')):
-            shutil.copyfile(x, os.path.join(PY2EXE_DIR, os.path.basename(x)))
+        print '\tAdding poppler'
+        for x in ('bin\\pdftohtml.exe', 'bin\\poppler-qt4.dll',
+                'bin\\freetype.dll', 'bin\\jpeg62.dll'):
+            shutil.copyfile(os.path.join(POPPLER, x),
+                os.path.join(PY2EXE_DIR, os.path.basename(x)))
+        print '\tAdding podofo'
+        for f in glob.glob(os.path.join(PODOFO, '*.dll')):
+            shutil.copyfile(f, os.path.join(PY2EXE_DIR, os.path.basename(f)))

         print '\tAdding ImageMagick'
         for f in os.listdir(IMAGEMAGICK_DIR):
             shutil.copyfile(os.path.join(IMAGEMAGICK_DIR, f), os.path.join(PY2EXE_DIR, f))
         print '\tCopying fontconfig'
-        tgt = os.path.join(PY2EXE_DIR, 'etc')
-        shutil.copytree(FONTCONFIG_DIR, tgt)
+        for f in glob.glob(os.path.join(FONTCONFIG_DIR, '*')):
+            tgt = os.path.join(PY2EXE_DIR, os.path.basename(f))
+            if os.path.isdir(f):
+                shutil.copytree(f, tgt)
+            else:
+                shutil.copyfile(f, tgt)

         print
         print 'Doing DLL redirection' # See http://msdn.microsoft.com/en-us/library/ms682600(VS.85).aspx
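The "DLL redirection" step above relies on the Windows loader behaviour described at the MSDN link: a <name>.exe.local file next to an executable makes DLLs resolve from the application directory first, and the installer file list in this commit does ship calibre.exe.local, calibre-server.exe.local and similar markers. A rough sketch of that idea, not the exact code of this script (PY2EXE_DIR is the frozen-output directory used above):

    import glob, os
    # Assumption: every bundled executable gets an empty '<exe>.local' marker file.
    for exe in glob.glob(os.path.join(PY2EXE_DIR, '*.exe')):
        open(exe + '.local', 'wb').close()   # e.g. calibre.exe.local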
@@ -158,7 +169,8 @@ def main(args=sys.argv):
                 'email.iterators',
                 'email.generator',
                 'win32process', 'win32api', 'msvcrt',
-                'win32event', 'sqlite3.dump',
+                'win32event', 'calibre.ebooks.lrf.any.*',
+                'sqlite3.dump',
                 'BeautifulSoup', 'pyreadline',
                 'pydoc', 'IPython.Extensions.*',
                 'calibre.web.feeds.recipes.*',
@@ -171,8 +183,7 @@ def main(args=sys.argv):
                 'excludes' : ["Tkconstants", "Tkinter", "tcl",
                     "_imagingtk", "ImageTk", "FixTk"
                     ],
-                'dll_excludes' : ['mswsock.dll', 'tcl85.dll',
-                    'MSVCP90.dll', 'tk85.dll'],
+                'dll_excludes' : ['mswsock.dll'],
                 },
             },

setup.py (17 changed lines)

@@ -94,8 +94,8 @@ if __name__ == '__main__':
                 sources=['src/calibre/utils/windows/winutil.c'],
                 libraries=['shell32', 'setupapi'],
                 include_dirs=os.environ.get('INCLUDE',
-                    'C:/WinDDK/7600.16385.0/inc/api/;'
-                    'C:/WinDDK/7600.16385.0/inc/crt/').split(';'),
+                    'C:/WinDDK/6001.18001/inc/api/;'
+                    'C:/WinDDK/6001.18001/inc/crt/').split(';'),
                 extra_compile_args=['/X']
                 ))

@@ -103,8 +103,8 @@ if __name__ == '__main__':
     poppler_lib = '/usr/lib'
     poppler_libs = []
     if iswindows:
-        poppler_inc = r'C:\cygwin\home\kovid\win32\include\poppler\qt4'
-        poppler_lib = r'C:\cygwin\home\kovid\win32\lib'
+        poppler_inc = r'C:\cygwin\home\kovid\poppler\include\poppler\qt4'
+        poppler_lib = r'C:\cygwin\home\kovid\poppler\lib'
         poppler_libs = ['QtCore4', 'QtGui4']
     if isosx:
         poppler_inc = '/Volumes/sw/build/poppler-0.10.7/qt4/src'
@@ -124,10 +124,9 @@ if __name__ == '__main__':
         print 'POPPLER_LIB_DIR environment variables.'

     podofo_inc = '/usr/include/podofo' if islinux else \
-        r'C:\cygwin\home\kovid\win32\include\podofo' if iswindows else \
+        'C:\\podofo\\include\\podofo' if iswindows else \
         '/usr/local/include/podofo'
-    podofo_lib = '/usr/lib' if islinux else \
-        r'C:\cygwin\home\kovid\win32\lib' if iswindows else \
+    podofo_lib = '/usr/lib' if islinux else r'C:\podofo' if iswindows else \
         '/usr/local/lib'
     podofo_inc = os.environ.get('PODOFO_INC_DIR', podofo_inc)
     if os.path.exists(os.path.join(podofo_inc, 'podofo.h')):
@@ -142,10 +141,10 @@ if __name__ == '__main__':
         print 'PODOFO_LIB_DIR environment variables.'

     fc_inc = '/usr/include/fontconfig' if islinux else \
-        r'C:\cygwin\home\kovid\win32\include\fontconfig' if iswindows else \
+        r'C:\cygwin\home\kovid\fontconfig\include\fontconfig' if iswindows else \
         '/Users/kovid/fontconfig/include/fontconfig'
     fc_lib = '/usr/lib' if islinux else \
-        r'C:\cygwin\home\kovid\win32\lib' if iswindows else \
+        r'C:\cygwin\home\kovid\fontconfig\lib' if iswindows else \
         '/Users/kovid/fontconfig/lib'

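setup.py picks a platform default for the PoDoFo (and, similarly, poppler and fontconfig) locations and then lets environment variables override it, as the PODOFO_INC_DIR lookup above shows. A small sketch of that resolution order (the override path is hypothetical):

    import os
    podofo_inc = '/usr/include/podofo'                          # platform default (Linux)
    podofo_inc = os.environ.get('PODOFO_INC_DIR', podofo_inc)   # env var wins if set
    # e.g. exporting PODOFO_INC_DIR=/opt/podofo/include/podofo before the build
    # makes the headers be searched there instead of the default location.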
@@ -2,7 +2,7 @@ __license__ = 'GPL v3'
 __copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
 __docformat__ = 'restructuredtext en'
 __appname__ = 'calibre'
-__version__ = '0.6.8'
+__version__ = '0.6.10'
 __author__ = "Kovid Goyal <kovid@kovidgoyal.net>"

 import re
@@ -27,12 +27,13 @@ every time you add an HTML file to the library.\
         from calibre.ebooks.epub import initialize_container

         with TemporaryDirectory('_plugin_html2zip') as tdir:
-            recs =[('debug_input', tdir, OptionRecommendation.HIGH)]
+            recs =[('debug_pipeline', tdir, OptionRecommendation.HIGH)]
             if self.site_customization and self.site_customization.strip():
                 recs.append(['input_encoding', self.site_customization.strip(),
                     OptionRecommendation.HIGH])
-            gui_convert(htmlfile, tdir, recs)
+            gui_convert(htmlfile, tdir, recs, abort_after_input_dump=True)
             of = self.temporary_file('_plugin_html2zip.zip')
+            tdir = os.path.join(tdir, 'input')
             opf = glob.glob(os.path.join(tdir, '*.opf'))[0]
             ncx = glob.glob(os.path.join(tdir, '*.ncx'))
             if ncx:
@@ -117,15 +117,6 @@ class InputFormatPlugin(Plugin):
     #: in sub-classes. Use :member:`options` instead. Every option must be an
     #: instance of :class:`OptionRecommendation`.
     common_options = set([
-        OptionRecommendation(name='debug_input',
-            recommended_value=None, level=OptionRecommendation.LOW,
-            help=_('Save the output from the input plugin to the specified '
-                'directory. Useful if you are unsure at which stage '
-                'of the conversion process a bug is occurring. '
-                'WARNING: This completely deletes the contents of '
-                'the specified directory.')
-        ),
-
         OptionRecommendation(name='input_encoding',
             recommended_value=None, level=OptionRecommendation.LOW,
             help=_('Specify the character encoding of the input document. If '
@@ -216,19 +207,6 @@ class InputFormatPlugin(Plugin):
         ret = self.convert(stream, options, file_ext,
                            log, accelerators)

-        if options.debug_input is not None:
-            options.debug_input = os.path.abspath(options.debug_input)
-            if not os.path.exists(options.debug_input):
-                os.makedirs(options.debug_input)
-            if isinstance(ret, basestring):
-                shutil.rmtree(options.debug_input)
-                shutil.copytree(output_dir, options.debug_input)
-            else:
-                from calibre.ebooks.oeb.writer import OEBWriter
-                w = OEBWriter(pretty_print=options.pretty_print)
-                w(ret, options.debug_input)
-
-            log.info('Input debug saved to:', options.debug_input)
-
         return ret

@@ -351,6 +351,10 @@ def initialize_plugins():

 initialize_plugins()

+def intialized_plugins():
+    for plugin in _initialized_plugins:
+        yield plugin
+
 def option_parser():
     parser = OptionParser(usage=_('''\
     %prog options
@@ -17,6 +17,7 @@ from calibre.devices.usbms.driver import USBMS
 class KINDLE(USBMS):

     name = 'Kindle Device Interface'
+    gui_name = 'Amazon Kindle'
     description = _('Communicate with the Kindle eBook reader.')
     author = _('John Schember')
     supported_platforms = ['windows', 'osx', 'linux']
@@ -112,6 +112,10 @@ class PRS500(DeviceConfig, DevicePlugin):
     SUPPORTS_SUB_DIRS = False
     MUST_READ_METADATA = True

+    @classmethod
+    def get_gui_name(cls):
+        return 'PRS-500'
+
     def log_packet(self, packet, header, stream=sys.stderr):
         """
         Log C{packet} to stream C{stream}.
@@ -22,6 +22,7 @@ from calibre import __appname__
 class PRS505(CLI, Device):

     name = 'PRS-505 Device Interface'
+    gui_name = 'SONY Pocket Edition'
     description = _('Communicate with the Sony PRS-505 eBook reader.')
     author = _('Kovid Goyal and John Schember')
     supported_platforms = ['windows', 'osx', 'linux']
@@ -30,16 +31,16 @@ class PRS505(CLI, Device):

     VENDOR_ID = [0x054c] #: SONY Vendor Id
     PRODUCT_ID = [0x031e] #: Product Id for the PRS-505
-    BCD = [0x229, 0x1000] #: Needed to disambiguate 505 and 700 on linux
+    BCD = [0x229, 0x1000]

     VENDOR_NAME = 'SONY'
-    WINDOWS_MAIN_MEM = 'PRS-505'
-    WINDOWS_CARD_A_MEM = re.compile(r'PRS-505/\S+:MS')
-    WINDOWS_CARD_B_MEM = re.compile(r'PRS-505/\S+:SD')
+    WINDOWS_MAIN_MEM = re.compile('PRS-(505|300)')
+    WINDOWS_CARD_A_MEM = re.compile(r'PRS-(505|300)/\S+:MS')
+    WINDOWS_CARD_B_MEM = re.compile(r'PRS-(505|300)/\S+:SD')

-    OSX_MAIN_MEM = re.compile(r'Sony PRS-505/[^:]+ Media')
-    OSX_CARD_A_MEM = re.compile(r'Sony PRS-505/[^:]+:MS Media')
-    OSX_CARD_B_MEM = re.compile(r'Sony PRS-505/[^:]+:SD Media')
+    OSX_MAIN_MEM = re.compile(r'Sony PRS-(505|300)/[^:]+ Media')
+    OSX_CARD_A_MEM = re.compile(r'Sony PRS-(505|300)/[^:]+:MS Media')
+    OSX_CARD_B_MEM = re.compile(r'Sony PRS-(505|300)/[^:]+:SD Media')

     MAIN_MEMORY_VOLUME_LABEL = 'Sony Reader Main Memory'
     STORAGE_CARD_VOLUME_LABEL = 'Sony Reader Storage Card'
@@ -84,7 +85,6 @@ class PRS505(CLI, Device):
         self._card_b_prefix = None

     def get_device_information(self, end_session=True):
-        #self.report_progress(1.0, _('Get device information...'))
         return (self.__class__.__name__, '', '', '')

     def books(self, oncard=None, end_session=True):
@@ -16,15 +16,17 @@ class PRS700(PRS505):
     name = 'PRS-700 Device Interface'
     description = _('Communicate with the Sony PRS-700 eBook reader.')
     author = _('Kovid Goyal and John Schember')
+    gui_name = 'SONY Touch edition'
     supported_platforms = ['windows', 'osx', 'linux']

     BCD = [0x31a]

-    WINDOWS_MAIN_MEM = 'PRS-700'
-    WINDOWS_CARD_A_MEM = re.compile(r'PRS-700/\S+:MS')
-    WINDOWS_CARD_B_MEM = re.compile(r'PRS-700/\S+:SD')
+    WINDOWS_MAIN_MEM = re.compile('PRS-[67]00')
+    WINDOWS_CARD_A_MEM = re.compile(r'PRS-[67]00/\S+:MS')
+    WINDOWS_CARD_B_MEM = re.compile(r'PRS-[67]00/\S+:SD')

-    OSX_MAIN_MEM = re.compile(r'Sony PRS-700/[^:]+ Media')
-    OSX_CARD_A_MEM = re.compile(r'Sony PRS-700/[^:]+:MS Media')
-    OSX_CARD_B_MEM = re.compile(r'Sony PRS-700/[^:]+:SD Media')
+    OSX_MAIN_MEM = re.compile(r'Sony PRS-[67]00/[^:]+ Media')
+    OSX_CARD_A_MEM = re.compile(r'Sony PRS-[67]00/[^:]+:MS Media')
+    OSX_CARD_B_MEM = re.compile(r'Sony PRS-[67]00/[^:]+:SD Media')

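The loosened patterns above accept the newer reader models alongside the old ones. A quick illustration with re (the device strings here are only placeholders for whatever the operating system reports):

    import re
    assert re.compile('PRS-(505|300)').search('PRS-300')   # the PRS-505 driver now also matches the 300
    assert re.compile('PRS-[67]00').search('PRS-600')      # the PRS-700 driver now also matches the 600
    assert re.compile('PRS-[67]00').search('PRS-700')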
@@ -5,10 +5,9 @@ Device scanner that fetches list of devices on system ina platform dependent
 manner.
 '''

-import sys
+import sys, re, os

 from calibre import iswindows, isosx, plugins
-from calibre.devices import libusb

 osx_scanner = win_scanner = linux_scanner = None

@@ -22,16 +21,38 @@ elif isosx:
         osx_scanner = plugins['usbobserver'][0].get_usb_devices
     except:
         raise RuntimeError('Failed to load the usbobserver plugin: %s'%plugins['usbobserver'][1])
-else:
-    linux_scanner = libusb.get_devices
+
+_usb_re = re.compile(r'Vendor\s*=\s*([0-9a-fA-F]+)\s+ProdID\s*=\s*([0-9a-fA-F]+)\s+Rev\s*=\s*([0-9a-fA-f.]+)')
+_DEVICES = '/proc/bus/usb/devices'
+
+
+def linux_scanner():
+    raw = open(_DEVICES).read()
+    devices = []
+    device = None
+    for x in raw.splitlines():
+        x = x.strip()
+        if x.startswith('T:'):
+            if device:
+                devices.append(device)
+            device = []
+        if device is not None and x.startswith('P:'):
+            match = _usb_re.search(x)
+            if match is not None:
+                ven, prod, bcd = match.group(1), match.group(2), match.group(3)
+                ven, prod, bcd = int(ven, 16), int(prod, 16), int(bcd.replace('.', ''), 16)
+                device = [ven, prod, bcd]
+    if device:
+        devices.append(device)
+    return devices
+
 class DeviceScanner(object):

     def __init__(self, *args):
         if isosx and osx_scanner is None:
             raise RuntimeError('The Python extension usbobserver must be available on OS X.')
-        if not (isosx or iswindows) and not libusb.has_library():
-            raise RuntimeError('DeviceScanner requires libusb to work.')
+        if not (isosx or iswindows) and not os.access(_DEVICES, os.R_OK):
+            raise RuntimeError('DeviceScanner requires %s to work.'%_DEVICES)
         self.scanner = win_scanner if iswindows else osx_scanner if isosx else linux_scanner
         self.devices = []

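The new linux_scanner() walks /proc/bus/usb/devices, starting a new record at every 'T:' line and pulling the vendor id, product id and BCD revision from the following 'P:' line. A sketch of the parsing on a made-up fragment (the 054c/031e values mirror the SONY ids used elsewhere in this commit):

    import re
    _usb_re = re.compile(r'Vendor\s*=\s*([0-9a-fA-F]+)\s+ProdID\s*=\s*([0-9a-fA-F]+)\s+Rev\s*=\s*([0-9a-fA-f.]+)')
    line = 'P:  Vendor=054c ProdID=031e Rev= 1.00'   # hypothetical /proc/bus/usb/devices entry
    m = _usb_re.search(line)
    ven, prod, bcd = (int(m.group(1), 16), int(m.group(2), 16),
                      int(m.group(3).replace('.', ''), 16))
    # -> ven == 0x054c, prod == 0x031e, bcd == 0x100; linux_scanner() returns a
    # list of such [vendor, product, bcd] triples, one per connected device.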
@@ -111,6 +111,14 @@ class Device(DeviceConfig, DevicePlugin):
     def reset(self, key='-1', log_packets=False, report_progress=None) :
         self._main_prefix = self._card_a_prefix = self._card_b_prefix = None

+    @classmethod
+    def get_gui_name(cls):
+        x = getattr(cls, 'gui_name', None)
+        if x is None:
+            x = cls.__name__
+        return x
+
+
     @classmethod
     def get_fdi(cls):
         fdi = ''
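get_gui_name() prefers an explicit gui_name class attribute (as added for the Kindle and SONY drivers in this commit) and falls back to the class name. A minimal illustration with hypothetical driver classes:

    class WithName(object):
        gui_name = 'Amazon Kindle'

    class WithoutName(object):
        pass

    def get_gui_name(cls):
        x = getattr(cls, 'gui_name', None)
        if x is None:
            x = cls.__name__
        return x

    # get_gui_name(WithName)    -> 'Amazon Kindle'
    # get_gui_name(WithoutName) -> 'WithoutName'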
@@ -158,6 +158,7 @@ def add_pipeline_options(parser, plumber):
               'DEBUG': (_('Options to help with debugging the conversion'),
                 [
                  'verbose',
+                 'debug_pipeline',
                 ]),

@@ -247,8 +248,7 @@ def main(args=sys.argv):

     plumber.run()

-    if plumber.opts.debug_input is None:
-        log(_('Output saved to'), ' ', plumber.output)
+    log(_('Output saved to'), ' ', plumber.output)

     return 0

@@ -3,7 +3,7 @@ __license__ = 'GPL 3'
 __copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
 __docformat__ = 'restructuredtext en'

-import os, re, sys
+import os, re, sys, shutil

 from calibre.customize.conversion import OptionRecommendation, DummyReporter
 from calibre.customize.ui import input_profiles, output_profiles, \
@@ -13,6 +13,27 @@ from calibre.ebooks.conversion.preprocess import HTMLPreProcessor
 from calibre.ptempfile import PersistentTemporaryDirectory
 from calibre import extract, walk

+DEBUG_README=u'''
+This debug directory contains snapshots of the e-book as it passes through the
+various stages of conversion. The stages are:
+
+    1. input - This is the result of running the input plugin on the source
+    file. Use this directory to debug the input plugin.
+
+    2. parsed - This is the result of preprocessing and parsing the output of
+    the input plugin. Note that for some input plugins this will be identical to
+    the input sub-directory. Use this directory to debug structure detection,
+    etc.
+
+    3. structure - This corresponds to the stage in the pipeline when structure
+    detection has run, but before the CSS is flattened. Use this directory to
+    debug the CSS flattening, font size conversion, etc.
+
+    4. processed - This corresponds to the e-book as it is passed to the output
+    plugin. Use this directory to debug the output plugin.
+
+'''
+
 def supported_input_formats():
     fmts = available_input_formats()
     for x in ('zip', 'rar', 'oebzip'):
@@ -47,7 +68,7 @@ class Plumber(object):
     ]

     def __init__(self, input, output, log, report_progress=DummyReporter(),
-            dummy=False, merge_plugin_recs=True):
+            dummy=False, merge_plugin_recs=True, abort_after_input_dump=False):
         '''
         :param input: Path to input file.
         :param output: Path to output file/directory
@@ -57,6 +78,7 @@ class Plumber(object):
         self.output = os.path.abspath(output)
         self.log = log
         self.ui_reporter = report_progress
+        self.abort_after_input_dump = abort_after_input_dump

         # Initialize the conversion options that are independent of input and
         # output formats. The input and output plugins can still disable these
|
|||||||
'verbosity.')
|
'verbosity.')
|
||||||
),
|
),
|
||||||
|
|
||||||
|
OptionRecommendation(name='debug_pipeline',
|
||||||
|
recommended_value=None, level=OptionRecommendation.LOW,
|
||||||
|
short_switch='d',
|
||||||
|
help=_('Save the output from different stages of the conversion '
|
||||||
|
'pipeline to the specified '
|
||||||
|
'directory. Useful if you are unsure at which stage '
|
||||||
|
'of the conversion process a bug is occurring.')
|
||||||
|
),
|
||||||
|
|
||||||
OptionRecommendation(name='input_profile',
|
OptionRecommendation(name='input_profile',
|
||||||
recommended_value='default', level=OptionRecommendation.LOW,
|
recommended_value='default', level=OptionRecommendation.LOW,
|
||||||
choices=[x.short_name for x in input_profiles()],
|
choices=[x.short_name for x in input_profiles()],
|
||||||
@ -622,6 +653,22 @@ OptionRecommendation(name='language',
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def dump_oeb(self, oeb, out_dir):
|
||||||
|
from calibre.ebooks.oeb.writer import OEBWriter
|
||||||
|
w = OEBWriter(pretty_print=self.opts.pretty_print)
|
||||||
|
w(oeb, out_dir)
|
||||||
|
|
||||||
|
def dump_input(self, ret, output_dir):
|
||||||
|
out_dir = os.path.join(self.opts.debug_pipeline, 'input')
|
||||||
|
if isinstance(ret, basestring):
|
||||||
|
shutil.copytree(output_dir, out_dir)
|
||||||
|
else:
|
||||||
|
os.makedirs(out_dir)
|
||||||
|
self.dump_oeb(ret, out_dir)
|
||||||
|
|
||||||
|
self.log.info('Input debug saved to:', out_dir)
|
||||||
|
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
'''
|
'''
|
||||||
Run the conversion pipeline
|
Run the conversion pipeline
|
||||||
@ -632,6 +679,18 @@ OptionRecommendation(name='language',
|
|||||||
self.log.filter_level = self.log.DEBUG
|
self.log.filter_level = self.log.DEBUG
|
||||||
self.flush()
|
self.flush()
|
||||||
|
|
||||||
|
if self.opts.debug_pipeline is not None:
|
||||||
|
self.opts.verbose = max(self.opts.verbose, 4)
|
||||||
|
self.opts.debug_pipeline = os.path.abspath(self.opts.debug_pipeline)
|
||||||
|
if not os.path.exists(self.opts.debug_pipeline):
|
||||||
|
os.makedirs(self.opts.debug_pipeline)
|
||||||
|
open(os.path.join(self.opts.debug_pipeline, 'README.txt'),
|
||||||
|
'w').write(DEBUG_README.encode('utf-8'))
|
||||||
|
for x in ('input', 'parsed', 'structure', 'processed'):
|
||||||
|
x = os.path.join(self.opts.debug_pipeline, x)
|
||||||
|
if os.path.exists(x):
|
||||||
|
shutil.rmtree(x)
|
||||||
|
|
||||||
# Run any preprocess plugins
|
# Run any preprocess plugins
|
||||||
from calibre.customize.ui import run_plugins_on_preprocess
|
from calibre.customize.ui import run_plugins_on_preprocess
|
||||||
self.input = run_plugins_on_preprocess(self.input)
|
self.input = run_plugins_on_preprocess(self.input)
|
||||||
@ -656,17 +715,23 @@ OptionRecommendation(name='language',
|
|||||||
self.oeb = self.input_plugin(stream, self.opts,
|
self.oeb = self.input_plugin(stream, self.opts,
|
||||||
self.input_fmt, self.log,
|
self.input_fmt, self.log,
|
||||||
accelerators, tdir)
|
accelerators, tdir)
|
||||||
|
if self.opts.debug_pipeline is not None:
|
||||||
|
self.dump_input(self.oeb, tdir)
|
||||||
|
if self.abort_after_input_dump:
|
||||||
|
return
|
||||||
if self.input_fmt == 'recipe':
|
if self.input_fmt == 'recipe':
|
||||||
self.opts_to_mi(self.user_metadata)
|
self.opts_to_mi(self.user_metadata)
|
||||||
if self.opts.debug_input is not None:
|
|
||||||
self.log('Debug input called, aborting the rest of the pipeline.')
|
|
||||||
return
|
|
||||||
if not hasattr(self.oeb, 'manifest'):
|
if not hasattr(self.oeb, 'manifest'):
|
||||||
self.oeb = create_oebbook(self.log, self.oeb, self.opts,
|
self.oeb = create_oebbook(self.log, self.oeb, self.opts,
|
||||||
self.input_plugin)
|
self.input_plugin)
|
||||||
self.input_plugin.postprocess_book(self.oeb, self.opts, self.log)
|
self.input_plugin.postprocess_book(self.oeb, self.opts, self.log)
|
||||||
pr = CompositeProgressReporter(0.34, 0.67, self.ui_reporter)
|
pr = CompositeProgressReporter(0.34, 0.67, self.ui_reporter)
|
||||||
self.flush()
|
self.flush()
|
||||||
|
if self.opts.debug_pipeline is not None:
|
||||||
|
out_dir = os.path.join(self.opts.debug_pipeline, 'parsed')
|
||||||
|
self.dump_oeb(self.oeb, out_dir)
|
||||||
|
self.log('Parsed HTML written to:', out_dir)
|
||||||
|
|
||||||
pr(0., _('Running transforms on ebook...'))
|
pr(0., _('Running transforms on ebook...'))
|
||||||
|
|
||||||
from calibre.ebooks.oeb.transforms.guide import Clean
|
from calibre.ebooks.oeb.transforms.guide import Clean
|
||||||
@ -702,6 +767,12 @@ OptionRecommendation(name='language',
|
|||||||
pr(0.4)
|
pr(0.4)
|
||||||
self.flush()
|
self.flush()
|
||||||
|
|
||||||
|
if self.opts.debug_pipeline is not None:
|
||||||
|
out_dir = os.path.join(self.opts.debug_pipeline, 'structure')
|
||||||
|
self.dump_oeb(self.oeb, out_dir)
|
||||||
|
self.log('Structured HTML written to:', out_dir)
|
||||||
|
|
||||||
|
|
||||||
if self.opts.extra_css and os.path.exists(self.opts.extra_css):
|
if self.opts.extra_css and os.path.exists(self.opts.extra_css):
|
||||||
self.opts.extra_css = open(self.opts.extra_css, 'rb').read()
|
self.opts.extra_css = open(self.opts.extra_css, 'rb').read()
|
||||||
|
|
||||||
@ -739,6 +810,12 @@ OptionRecommendation(name='language',
|
|||||||
pr(1.)
|
pr(1.)
|
||||||
self.flush()
|
self.flush()
|
||||||
|
|
||||||
|
if self.opts.debug_pipeline is not None:
|
||||||
|
out_dir = os.path.join(self.opts.debug_pipeline, 'processed')
|
||||||
|
self.dump_oeb(self.oeb, out_dir)
|
||||||
|
self.log('Processed HTML written to:', out_dir)
|
||||||
|
return
|
||||||
|
|
||||||
self.log.info('Creating %s...'%self.output_plugin.name)
|
self.log.info('Creating %s...'%self.output_plugin.name)
|
||||||
our = CompositeProgressReporter(0.67, 1., self.ui_reporter)
|
our = CompositeProgressReporter(0.67, 1., self.ui_reporter)
|
||||||
self.output_plugin.report_progress = our
|
self.output_plugin.report_progress = our
|
||||||
|
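With the new debug_pipeline option (short switch 'd' per the OptionRecommendation above), and assuming the usual mapping of option names to command-line switches, a conversion can be asked to dump every stage roughly like this (paths and file names are only examples):

    ebook-convert book.epub book.mobi -d /tmp/conv-debug
    ebook-convert book.epub book.mobi --debug-pipeline /tmp/conv-debug

Each run recreates the input/, parsed/, structure/ and processed/ snapshots under the chosen directory.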
@@ -169,7 +169,7 @@ class HTMLPreProcessor(object):
                   (re.compile(ur'\u00a0'), lambda match : ' '),

                   # Detect Chapters to match default XPATH in GUI
-                  (re.compile(r'(?=<(/?br|p))(<(/?br|p)[^>]*)?>\s*(?P<chap>(<i><b>|<i>|<b>)?(Chapter|Epilogue|Prologue|Book|Part)\s*(\d+|\w+)?(</i></b>|</i>|</b>)?)(</?p[^>]*>|<br[^>]*>)\n?((?=(<i>)?\s*\w+(\s+\w+)?(</i>)?(<br[^>]*>|</?p[^>]*>))((?P<title>(<i>)?\s*\w+(\s+\w+)?(</i>)?)(<br[^>]*>|</?p[^>]*>)))?', re.IGNORECASE), chap_head),
+                  (re.compile(r'(?=<(/?br|p))(<(/?br|p)[^>]*)?>\s*(?P<chap>(<i><b>|<i>|<b>)?(Chapter|Epilogue|Prologue|Book|Part)\s*([\d\w-]+)?(</i></b>|</i>|</b>)?)(</?p[^>]*>|<br[^>]*>)\n?((?=(<i>)?\s*\w+(\s+\w+)?(</i>)?(<br[^>]*>|</?p[^>]*>))((?P<title>(<i>)?\s*\w+(\s+\w+)?(</i>)?)(<br[^>]*>|</?p[^>]*>)))?', re.IGNORECASE), chap_head),
                   (re.compile(r'(?=<(/?br|p))(<(/?br|p)[^>]*)?>\s*(?P<chap>([A-Z \'"!]{5,})\s*(\d+|\w+)?)(</?p[^>]*>|<br[^>]*>)\n?((?=(<i>)?\s*\w+(\s+\w+)?(</i>)?(<br[^>]*>|</?p[^>]*>))((?P<title>.*)(<br[^>]*>|</?p[^>]*>)))?'), chap_head),

                   # Have paragraphs show better
@@ -17,7 +17,7 @@ class MOBIInput(InputFormatPlugin):
         from calibre.ebooks.mobi.reader import MobiReader
         from lxml import html
         mr = MobiReader(stream, log, options.input_encoding,
-                        options.debug_input)
+                        options.debug_pipeline)
         parse_cache = {}
         mr.extract_content('.', parse_cache)
         raw = parse_cache.pop('calibre_raw_mobi_markup', False)
@@ -50,10 +50,10 @@ class TXTInput(InputFormatPlugin):
         htmlfile.write(html.encode('utf-8'))
         htmlfile.close()
         cwd = os.getcwdu()
-        odi = options.debug_input
-        options.debug_input = None
+        odi = options.debug_pipeline
+        options.debug_pipeline = None
         oeb = html_input(open(htmlfile.name, 'rb'), options, 'html', log,
                 {}, cwd)
-        options.debug_input = odi
+        options.debug_pipeline = odi
         os.remove(htmlfile.name)
         return oeb
src/calibre/gui2/convert/debug.py (new file, 63 lines)

@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
+from __future__ import with_statement
+
+__license__ = 'GPL v3'
+__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
+__docformat__ = 'restructuredtext en'
+
+import os
+
+from PyQt4.Qt import SIGNAL
+
+from calibre.gui2.convert.debug_ui import Ui_Form
+from calibre.gui2.convert import Widget
+from calibre.gui2 import error_dialog, choose_dir
+
+class DebugWidget(Widget, Ui_Form):
+
+    TITLE = _('Debug')
+    ICON = ':/images/debug.svg'
+    HELP = _('Debug the conversion process.')
+
+    def __init__(self, parent, get_option, get_help, db=None, book_id=None):
+        Widget.__init__(self, parent, 'debug',
+                ['debug_pipeline']
+                )
+        self.db, self.book_id = db, book_id
+        self.initialize_options(get_option, get_help, db, book_id)
+        self.connect(self.button_debug_dir, SIGNAL('clicked()'),
+                self.set_debug_dir)
+        self.connect(self.button_clear, SIGNAL('clicked()'),
+                self.clear_debug_dir)
+
+    def clear_debug_dir(self):
+        self.opt_debug_pipeline.setText('')
+
+    def set_debug_dir(self):
+        x = choose_dir(self, 'conversion debug dir', _('Choose debug folder'))
+        if x:
+            self.opt_debug_pipeline.setText(x)
+
+    def pre_commit_check(self):
+        try:
+            x = unicode(self.opt_debug_pipeline.text()).strip()
+            if not x:
+                return True
+            x = os.path.abspath(x)
+            if x:
+                if not os.path.exists(x):
+                    os.makedirs(x)
+                test = os.path.join(x, 'test')
+                open(test, 'wb').close()
+                os.remove(test)
+        except:
+            import traceback
+            det_msg = traceback.format_exc()
+            error_dialog(self, _('Invalid debug directory'),
+                    _('Failed to create debug directory')+': '+
+                    unicode(self.opt_debug_pipeline.text()),
+                    det_msg=det_msg, show=True)
+            return False
+        return True
src/calibre/gui2/convert/debug.ui (new file, 78 lines)

@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>Form</class>
+ <widget class="QWidget" name="Form">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>436</width>
+    <height>382</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Form</string>
+  </property>
+  <layout class="QGridLayout" name="gridLayout">
+   <item row="0" column="0" colspan="3">
+    <widget class="QLabel" name="label">
+     <property name="text">
+      <string>Choose a folder to put the debug output into. If you specify a folder, calibre will place a lot of debug output into it. This will be useful in understanding the conversion process and figuring out the correct values for conversion parameters like Table of Contents and Chapter Detection.</string>
+     </property>
+     <property name="wordWrap">
+      <bool>true</bool>
+     </property>
+    </widget>
+   </item>
+   <item row="1" column="0">
+    <widget class="QLineEdit" name="opt_debug_pipeline">
+     <property name="readOnly">
+      <bool>true</bool>
+     </property>
+    </widget>
+   </item>
+   <item row="1" column="2">
+    <widget class="QToolButton" name="button_debug_dir">
+     <property name="toolTip">
+      <string>Choose debug folder</string>
+     </property>
+     <property name="text">
+      <string>...</string>
+     </property>
+     <property name="icon">
+      <iconset resource="../images.qrc">
+       <normaloff>:/images/document_open.svg</normaloff>:/images/document_open.svg</iconset>
+     </property>
+    </widget>
+   </item>
+   <item row="2" column="0">
+    <spacer name="verticalSpacer">
+     <property name="orientation">
+      <enum>Qt::Vertical</enum>
+     </property>
+     <property name="sizeHint" stdset="0">
+      <size>
+       <width>20</width>
+       <height>40</height>
+      </size>
+     </property>
+    </spacer>
+   </item>
+   <item row="1" column="1">
+    <widget class="QToolButton" name="button_clear">
+     <property name="text">
+      <string>...</string>
+     </property>
+     <property name="icon">
+      <iconset resource="../images.qrc">
+       <normaloff>:/images/clear_left.svg</normaloff>:/images/clear_left.svg</iconset>
+     </property>
+    </widget>
+   </item>
+  </layout>
+ </widget>
+ <resources>
+  <include location="../images.qrc"/>
+ </resources>
+ <connections/>
+</ui>
@ -8,10 +8,12 @@ from calibre.ebooks.conversion.plumber import Plumber, DummyReporter
 from calibre.utils.logging import Log
 from calibre.customize.conversion import OptionRecommendation

-def gui_convert(input, output, recommendations, notification=DummyReporter()):
+def gui_convert(input, output, recommendations, notification=DummyReporter(),
+        abort_after_input_dump=False):
     recommendations = list(recommendations)
     recommendations.append(('verbose', 2, OptionRecommendation.HIGH))
-    plumber = Plumber(input, output, Log(), report_progress=notification)
+    plumber = Plumber(input, output, Log(), report_progress=notification,
+            abort_after_input_dump=abort_after_input_dump)
     plumber.merge_ui_recommendations(recommendations)

     plumber.run()
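A minimal usage sketch of the extended gui_convert() signature shown above. The module path and the file names are assumptions for illustration, not part of this commit; 'debug_pipeline' is the option the new debug widget sets (see the bulk-convert hunk later in this diff).

    from calibre.gui2.convert.gui_conversion import gui_convert
    from calibre.customize.conversion import OptionRecommendation

    # Placeholder paths; the debug directory receives the intermediate output.
    recs = [('debug_pipeline', '/tmp/calibre-conv-debug', OptionRecommendation.HIGH)]
    gui_convert('input.epub', 'output.mobi', recs,
            abort_after_input_dump=False)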
@ -19,6 +19,8 @@ from calibre.gui2.convert.look_and_feel import LookAndFeelWidget
 from calibre.gui2.convert.page_setup import PageSetupWidget
 from calibre.gui2.convert.structure_detection import StructureDetectionWidget
 from calibre.gui2.convert.toc import TOCWidget
+from calibre.gui2.convert.debug import DebugWidget
+

 from calibre.ebooks.conversion.plumber import Plumber, supported_input_formats
 from calibre.customize.ui import available_output_formats
@ -139,6 +141,7 @@ class Config(ResizableDialog, Ui_Dialog):
         ps = widget_factory(PageSetupWidget)
         sd = widget_factory(StructureDetectionWidget)
         toc = widget_factory(TOCWidget)
+        debug = widget_factory(DebugWidget)

         output_widget = None
         name = self.plumber.output_plugin.name.lower().replace(' ', '_')
@ -173,6 +176,7 @@ class Config(ResizableDialog, Ui_Dialog):
         widgets.append(input_widget)
         if output_widget is not None:
             widgets.append(output_widget)
+        widgets.append(debug)
         for w in widgets:
             self.stack.addWidget(w)
             self.connect(w, SIGNAL('set_help(PyQt_PyObject)'),
2896
src/calibre/gui2/images/debug.svg
Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 124 KiB
@ -722,7 +722,7 @@ class Main(MainWindow, Ui_MainWindow, DeviceGUI):
             self.set_default_thumbnail(\
                     self.device_manager.device.THUMBNAIL_HEIGHT)
             self.status_bar.showMessage(_('Device: ')+\
-                    self.device_manager.device.__class__.__name__+\
+                    self.device_manager.device.__class__.get_gui_name()+\
                     _(' detected.'), 3000)
             self.device_connected = True
             self._sync_menu.enable_device_actions(True, self.device_manager.device.card_prefix())
@ -126,6 +126,10 @@ def convert_bulk_ebook(parent, db, book_ids, out_format=None):
                     OptionRecommendation.HIGH))
             temp_files.append(d.cover_file)

+        for x in list(lrecs):
+            if x[0] == 'debug_pipeline':
+                lrecs.remove(x)
+
         desc = _('Convert book %d of %d (%s)') % (i + 1, total, repr(mi.title))

         args = [in_file, out_file.name, lrecs]
@ -90,7 +90,7 @@ class Sony500(Device):
 class Sony505(Sony500):

     output_format = 'EPUB'
-    name = 'SONY PRS 505/700'
+    name = 'SONY Reader Pocket/Touch Edition'
     id = 'prs505'

 class CybookG3(Device):
@ -486,6 +486,7 @@ class LibraryPage(QWizardPage, LibraryUI):
             try:
                 os.makedirs(lp)
             except:
+                traceback.print_exc()
                 lp = os.path.expanduser('~')
         self.location.setText(lp)

@ -142,7 +142,7 @@ Why doesn't |app| have a column for foo?

 How do I move my |app| library from one computer to another?
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Simply copy the |app| library folder from the old to the new computer. You can find out what the library folder is by clicking Preferences. The very first item is the path tot he library folder. Now on the new computer, start |app| for the first time. It will run the Welcome Wizard asking you for the location of the |app| library. Point it to the previously copied folder.
+Simply copy the |app| library folder from the old to the new computer. You can find out what the library folder is by clicking Preferences. The very first item is the path to the library folder. Now on the new computer, start |app| for the first time. It will run the Welcome Wizard asking you for the location of the |app| library. Point it to the previously copied folder.

 Note that if you are transferring between different types of computers (for example Windows to OS X) then after doing the above you should also go to Preferences->Advanced and click the Check database integrity button. It will warn you about missing files, if any, which you should then transfer by hand.

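For anyone scripting the copy step described in this FAQ entry, a minimal sketch under the assumption that both paths are placeholders chosen for illustration:

    import shutil

    # Copy the whole calibre library folder to the new location; afterwards
    # point the Welcome Wizard on the new computer at the copied folder.
    old_library = '/home/olduser/Calibre Library'
    new_library = '/home/newuser/Calibre Library'
    shutil.copytree(old_library, new_library)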
@ -180,7 +180,7 @@ else:
     data = dict(version = version, name='osx',
             installer_name='OS X universal dmg',
             title='Download %s for OS X'%(__appname__),
-            compatibility='%s works on OS X Tiger and above.'%(__appname__,),
+            compatibility='%s works on OS X Tiger and Leopard, but not Snow Leopard.'%(__appname__,),
             path=MOBILEREAD+file, app=__appname__,
             note=Markup(\
             u'''
@ -78,7 +78,7 @@
 and press Enter:
 </p>
 <pre class="wiki">
-sudo python -c "import urllib2; exec urllib2.urlopen('http://calibre.kovidgoyal.net/download_linux_binary_installer').read(); main()"
+sudo python -c "import urllib2; exec urllib2.urlopen('http://status.calibre-ebook.com/linux_installer').read(); main()"
 </pre>
 <h4>Note</h4>
 <ul>
@ -91,15 +91,6 @@ sudo python -c "import urllib2; exec urllib2.urlopen('http://calibre.kovidgoyal.
 You must have xdg-utils installed
 on your system before running the installer.
 </li>
-<li>
-For device automounting to work, you must have the pmount
-package installed on your system.
-</li>
-<li>
-On a 64bit machine, you must have 32-bit versions
-of common libraries like X11, freetype, fontconfig,
-expat and their various dependencies installed.
-</li>
 </ul>
 <h3>Source install</h3>
 <p>
@ -117,7 +108,7 @@ sudo python -c "import urllib2; exec urllib2.urlopen('http://calibre.kovidgoyal.
 <pre class="wiki">
 wget -O- http://calibre.kovidgoyal.net/downloads/${app}-${version}.tar.gz | tar xvz
 cd calibre*
-python setup.py build && sudo python setup.py install
+python setup.py build_ext && python setup.py build && sudo python setup.py install
 sudo calibre_postinstall
 </pre>
 Note that if your distribution does not have a
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -878,6 +878,9 @@ class BasicNewsRecipe(Recipe):
         mi.author_sort = __appname__
         mi.publication_type = 'periodical:'+self.publication_type
         mi.timestamp = datetime.now()
+        mi.comments = self.description
+        if not isinstance(mi.comments, unicode):
+            mi.comments = mi.comments.decode('utf-8', 'replace')
         mi.pubdate = datetime.now()
         opf_path = os.path.join(dir, 'index.opf')
         ncx_path = os.path.join(dir, 'index.ncx')
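A small illustration of the decode step added above; the byte string is a made-up example. It guarantees the comments end up as unicode even when a recipe description contains raw UTF-8 bytes, replacing any undecodable bytes instead of raising.

    description = 'Daily news \xe2\x80\x93 utf-8 encoded byte string'
    if not isinstance(description, unicode):
        description = description.decode('utf-8', 'replace')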
@ -55,7 +55,8 @@ recipe_modules = ['recipe_' + r for r in (
     'eltiempo_hn', 'slate', 'tnxm', 'bbcvietnamese', 'vnexpress',
     'volksrant', 'theeconomictimes_india', 'ourdailybread',
     'monitor', 'republika', 'beta', 'beta_en', 'glasjavnosti',
-    'esquire', 'livemint', 'thedgesingapore', 'darknet',
+    'esquire', 'livemint', 'thedgesingapore', 'darknet', 'rga',
+    'intelligencer',
     )]


@ -7,7 +7,7 @@ import re
|
|||||||
from calibre.web.feeds.news import BasicNewsRecipe
|
from calibre.web.feeds.news import BasicNewsRecipe
|
||||||
|
|
||||||
class CNN(BasicNewsRecipe):
|
class CNN(BasicNewsRecipe):
|
||||||
|
|
||||||
title = 'CNN'
|
title = 'CNN'
|
||||||
description = 'Global news'
|
description = 'Global news'
|
||||||
timefmt = ' [%d %b %Y]'
|
timefmt = ' [%d %b %Y]'
|
||||||
@ -20,7 +20,7 @@ class CNN(BasicNewsRecipe):
     preprocess_regexps = [(re.compile(i[0], re.IGNORECASE | re.DOTALL), i[1]) for i in [
         (r'<head>.*?<title', lambda match : '<head><title'),
         (r'</title>.*?</head>', lambda match : '</title></head>'),
-        (r'<body.*?<\!\-\-Article.*?>', lambda match : ''),
+        (r'<body.*?<\!\-\-Article.*?>', lambda match : '<body>'),
         (r'<\!\-\-Article End\-\->.*?</body>', lambda match : '</body>'),
         (r'(</h\d>)<ul>.*?</ul>', lambda match : match.group(1)), # drop story highlights
         (r'<h2>(.*?)</h2><h1>(.*?)</h1>', lambda match : '<h1>' + match.group(1) + '</h1><h2>' + match.group(2) + '</h2>'), # sports uses h2 for main title and h1 for subtitle (???) switch these around
@ -33,7 +33,7 @@ class CNN(BasicNewsRecipe):
|
|||||||
|
|
||||||
def print_version(self, url):
|
def print_version(self, url):
|
||||||
return 'http://www.printthis.clickability.com/pt/printThis?clickMap=printThis&fb=Y&url=' + url
|
return 'http://www.printthis.clickability.com/pt/printThis?clickMap=printThis&fb=Y&url=' + url
|
||||||
|
|
||||||
feeds = [
|
feeds = [
|
||||||
('Top News', 'http://rss.cnn.com/rss/cnn_topstories.rss'),
|
('Top News', 'http://rss.cnn.com/rss/cnn_topstories.rss'),
|
||||||
('World', 'http://rss.cnn.com/rss/cnn_world.rss'),
|
('World', 'http://rss.cnn.com/rss/cnn_world.rss'),
|
||||||
|
@ -18,13 +18,16 @@ class HunTechNet(BasicNewsRecipe):
|
|||||||
__author__ = 'Devilinside'
|
__author__ = 'Devilinside'
|
||||||
max_articles_per_feed = 30
|
max_articles_per_feed = 30
|
||||||
timefmt = ' [%Y, %b %d, %a]'
|
timefmt = ' [%Y, %b %d, %a]'
|
||||||
extra_css = '''
|
|
||||||
body{font-family:Arial,Helvetica,sans-serif; font-size:small;}
|
|
||||||
h1{font-size:large;}
|
|
||||||
'''
|
|
||||||
remove_tags_after = dict(name='ul', attrs={'class':'cikk_bottom box'})
|
|
||||||
remove_tags_before = dict(name='div', attrs={'id':'c-main'})
|
remove_tags_before = dict(name='div', attrs={'id':'c-main'})
|
||||||
remove_tags = [dict(name='div', attrs={'class':'wrp clr'})]
|
remove_tags = [dict(name='div', attrs={'class':'wrp clr'}),
|
||||||
|
{'class' : ['screenrdr','forum','print','startlap','text_small','text_normal','text_big','email']},
|
||||||
|
]
|
||||||
|
keep_only_tags = [dict(name='div', attrs={'class':'cikk_head box'}),dict(name='div', attrs={'class':'cikk_txt box'})]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
feeds = [(u'C\xedmlap',
|
feeds = [(u'C\xedmlap',
|
||||||
|
45
src/calibre/web/feeds/recipes/recipe_intelligencer.py
Normal file
45
src/calibre/web/feeds/recipes/recipe_intelligencer.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
__license__ = 'GPL v3'
|
||||||
|
__copyright__ = '2009, Darko Miletic <darko.miletic at gmail.com>'
|
||||||
|
|
||||||
|
'''
|
||||||
|
Inteligencer.ca
|
||||||
|
'''
|
||||||
|
|
||||||
|
from calibre.web.feeds.news import BasicNewsRecipe
|
||||||
|
|
||||||
|
class Inteligencer(BasicNewsRecipe):
|
||||||
|
title = u'Intelligencer'
|
||||||
|
oldest_article = 7
|
||||||
|
max_articles_per_feed = 100
|
||||||
|
encoding = 'utf-8'
|
||||||
|
language = _('English')
|
||||||
|
no_stylesheets = True
|
||||||
|
use_embedded_content = False
|
||||||
|
lang = 'en-CA'
|
||||||
|
|
||||||
|
conversion_options = {
|
||||||
|
'language' : lang
|
||||||
|
, 'pretty_print' : True
|
||||||
|
}
|
||||||
|
|
||||||
|
remove_attributes = ['style','width','height','font','border','align','action','onload']
|
||||||
|
|
||||||
|
keep_only_tags = [dict(name='td',attrs={'colspan':'2'})]
|
||||||
|
|
||||||
|
remove_tags = [
|
||||||
|
dict(name=['object','link','embed','iframe'])
|
||||||
|
,dict(name='div',attrs={'id':'header'})
|
||||||
|
]
|
||||||
|
feeds = [(u'Recent News', u'http://www.intelligencer.ca/rss/')]
|
||||||
|
|
||||||
|
def print_version(self, url):
|
||||||
|
return url.replace('/ArticleDisplay.aspx?','/PrintArticle.aspx?')
|
||||||
|
|
||||||
|
def preprocess_html(self, soup):
|
||||||
|
for item in soup.findAll('td'):
|
||||||
|
del item['colspan']
|
||||||
|
item.name = 'div'
|
||||||
|
return soup
|
||||||
|
|
61
src/calibre/web/feeds/recipes/recipe_rga.py
Normal file
61
src/calibre/web/feeds/recipes/recipe_rga.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
__license__ = 'GPL v3'
|
||||||
|
__copyright__ = '2009, W. Gerard <wii at gerard-nrw.de>'
|
||||||
|
'''
|
||||||
|
rga-online.de
|
||||||
|
'''
|
||||||
|
|
||||||
|
from calibre.web.feeds.news import BasicNewsRecipe
|
||||||
|
|
||||||
|
class rga_onliner(BasicNewsRecipe):
|
||||||
|
title = 'RGA Online - German'
|
||||||
|
__author__ = 'Werner Gerard'
|
||||||
|
description = "E-Zeitung aus RSS-Artikeln zusammengestellt."
|
||||||
|
publisher = 'RGA-Online'
|
||||||
|
category = 'Nachrichten, RGA'
|
||||||
|
oldest_article = 3
|
||||||
|
max_articles_per_feed = 100
|
||||||
|
language = _('German')
|
||||||
|
lang = 'de-DE'
|
||||||
|
no_stylesheets = True
|
||||||
|
use_embedded_content = False
|
||||||
|
encoding = 'cp1252'
|
||||||
|
|
||||||
|
remove_tags_before = dict(name='span', attrs={'class':'headgross'})
|
||||||
|
remove_tags_after = dict(name='br', attrs={'clear':'all'})
|
||||||
|
|
||||||
|
# remove_tags_after = dict(name='br', attrs={'clear':'clear'})
|
||||||
|
|
||||||
|
feeds = [
|
||||||
|
('RGA-Online Remscheid', 'http://www.rga-online.de/rss/rs_news.php'),
|
||||||
|
('RGA-Online Wermelskirchen', 'http://www.rga-online.de/rss/wk_news.php'),
|
||||||
|
('RGA-Online Hueckeswagen', 'http://www.rga-online.de/rss/hk_news.php'),
|
||||||
|
('RGA-Online Radevormwald', 'http://www.rga-online.de/rss/rz_news.php'),
|
||||||
|
('RGA-Online Tagesthemen', 'http://www.rga-online.de/rss/tt_news.php'),
|
||||||
|
('RGA-Online Brennpunkte', 'http://www.rga-online.de/rss/br_news.php'),
|
||||||
|
('RGA-Online Sport', 'http://www.rga-online.de/rss/spo_news.php'),
|
||||||
|
('RGA-Online Lokalsport', 'http://www.rga-online.de/rss/sp_news.php'),
|
||||||
|
('RGA-Online Bergisches Land', 'http://www.rga-online.de/rss/bg_news.php'),
|
||||||
|
('RGA-Online Bergische Wirtschaft', 'http://www.rga-online.de/rss/bw_news.php')
|
||||||
|
]
|
||||||
|
#"print based version"
|
||||||
|
# def print_version(self, url):
|
||||||
|
# main, separatior, sub = url.rpartition('?')
|
||||||
|
# sub1, sep1, artikel = sub.rpartition('&')
|
||||||
|
# sub2, sep2, publikation = sub1.rpartition('&')
|
||||||
|
|
||||||
|
|
||||||
|
# return 'http://www.pipeline.de/cgi-bin/pipeline.fcg?userid=1&publikation=2&template=druck.html&'+ publikation + '&' + artikel
|
||||||
|
# return 'http://www.pipeline.de/cgi-bin/pipeline.fcg?userid=1&publikation=2&template=druck.html&redaktion=2&artikel=109208787'
|
||||||
|
# http://www.pipeline.de/cgi-bin/pipeline.fcg?userid=1&publikation=2&template=druck.html&redaktion=1&artikel=109209772
|
||||||
|
# http://www.rga-online.de/lokales/h6ckeswagen.php?publikation=2&template=phparttext&ausgabe=49740&redaktion=2&artikel=109208787
|
||||||
|
|
||||||
|
|
||||||
|
def get_cover_url(self):
|
||||||
|
return 'http://rga.werner-gerard.de/rga.jpg'
|
||||||
|
|
||||||
|
def postprocess_html(self, soup, first):
|
||||||
|
for tag in soup.findAll(name=['table', 'tr', 'td']):
|
||||||
|
tag.name = 'span'
|
||||||
|
return soup
|
@ -12,44 +12,57 @@ from calibre.web.feeds.news import BasicNewsRecipe
|
|||||||
class ScientificAmerican(BasicNewsRecipe):
|
class ScientificAmerican(BasicNewsRecipe):
|
||||||
title = u'Scientific American'
|
title = u'Scientific American'
|
||||||
description = u'Popular science. Monthly magazine.'
|
description = u'Popular science. Monthly magazine.'
|
||||||
__author__ = 'Kovid Goyal'
|
__author__ = 'Kovid Goyal and Sujata Raman'
|
||||||
language = _('English')
|
language = _('English')
|
||||||
oldest_article = 30
|
oldest_article = 30
|
||||||
max_articles_per_feed = 100
|
max_articles_per_feed = 100
|
||||||
no_stylesheets = True
|
no_stylesheets = True
|
||||||
use_embedded_content = False
|
use_embedded_content = False
|
||||||
|
extra_css = '''
|
||||||
|
p{font-weight: normal; font-size:small}
|
||||||
|
li{font-weight: normal; font-size:small}
|
||||||
|
.headline p{font-size:x-small; font-family:Arial,Helvetica,sans-serif;}
|
||||||
|
h2{font-size:x-small;}
|
||||||
|
h3{font-size:x-small;font-family:Arial,Helvetica,sans-serif;}
|
||||||
|
'''
|
||||||
remove_tags_before = dict(name='div', attrs={'class':'headline'})
|
remove_tags_before = dict(name='div', attrs={'class':'headline'})
|
||||||
|
|
||||||
remove_tags_after = dict(id=['article'])
|
remove_tags_after = dict(id=['article'])
|
||||||
remove_tags = [
|
remove_tags = [
|
||||||
dict(id=['sharetools', 'reddit']),
|
dict(id=['sharetools', 'reddit']),
|
||||||
dict(name='script'),
|
dict(name='script'),
|
||||||
{'class':['float_left', 'atools']},
|
{'class':['float_left', 'atools']},
|
||||||
{"class": re.compile(r'also-in-this')}
|
{"class": re.compile(r'also-in-this')},
|
||||||
|
dict(name='a',title = ["Get the Rest of the Article","Subscribe","Buy this Issue"]),
|
||||||
|
dict(name = 'img',alt = ["Graphic - Get the Rest of the Article"]),
|
||||||
]
|
]
|
||||||
|
|
||||||
html2lrf_options = ['--base-font-size', '8']
|
html2lrf_options = ['--base-font-size', '8']
|
||||||
recursions = 1
|
recursions = 1
|
||||||
match_regexps = [r'article.cfm.id=\S+page=(2|3|4|5|6|7|8|9|10|11|12|13|14|15)']
|
match_regexps = [r'article.cfm.id=\S+page=(2|3|4|5|6|7|8|9|10|11|12|13|14|15)']
|
||||||
|
|
||||||
def parse_index(self):
|
def parse_index(self):
|
||||||
soup = self.index_to_soup('http://www.scientificamerican.com/sciammag/')
|
soup = self.index_to_soup('http://www.scientificamerican.com/sciammag/')
|
||||||
month = soup.find(id='magazine-month')
|
monthtag = soup.find('div',attrs={'id':'magazine-main_col2'})
|
||||||
|
month = self.tag_to_string(monthtag.contents[1])
|
||||||
|
|
||||||
|
|
||||||
self.timefmt = ' [%s]'%(self.tag_to_string(month))
|
self.timefmt = ' [%s]'%(self.tag_to_string(month))
|
||||||
img = soup.find('img', alt='Scientific American Magazine', src=True)
|
img = soup.find('img', alt='Scientific American Magazine', src=True)
|
||||||
if img is not None:
|
if img is not None:
|
||||||
self.cover_url = img['src']
|
self.cover_url = img['src']
|
||||||
features, feeds = [], []
|
features, feeds = [], []
|
||||||
for p in soup.find(id='magazine-info').findAll('p') + \
|
for p in soup.find(id='magazine-main_col2').findAll('p') :
|
||||||
soup.find(id='magazine-info-more').findAll('p'):
|
a = p.find('a', href=True)
|
||||||
all_as = p.findAll('a', href=True)
|
|
||||||
a = all_as[0]
|
|
||||||
if a is None: continue
|
if a is None: continue
|
||||||
desc = ''
|
desc = ''
|
||||||
for s in p.find('span', attrs={'class':'sub'}):
|
s = p.find('span', attrs={'class':"sub"})
|
||||||
desc += self.tag_to_string(s)
|
desc = self.tag_to_string(s)
|
||||||
|
|
||||||
article = {
|
article = {
|
||||||
'url' : a.get('href'),
|
'url' : a['href'],
|
||||||
'title' : self.tag_to_string(all_as[-1]),
|
'title' : self.tag_to_string(a),
|
||||||
'date' : '',
|
'date' : '',
|
||||||
'description' : desc,
|
'description' : desc,
|
||||||
}
|
}
|
||||||
@ -59,31 +72,31 @@ class ScientificAmerican(BasicNewsRecipe):
|
|||||||
section = []
|
section = []
|
||||||
found = []
|
found = []
|
||||||
title = None
|
title = None
|
||||||
|
|
||||||
for x in soup.find(id='magazine-main_col1').findAll(['div', 'a']):
|
for x in soup.find(id='magazine-main_col1').findAll(['div', 'a']):
|
||||||
|
|
||||||
if x.name == 'div':
|
if x.name == 'div':
|
||||||
|
|
||||||
if section:
|
if section:
|
||||||
feeds.append((title, section))
|
feeds.append((title, section))
|
||||||
|
|
||||||
title = self.tag_to_string(x)
|
title = self.tag_to_string(x)
|
||||||
section = []
|
section = []
|
||||||
else:
|
else:
|
||||||
if title is None or not a.get('href', False) or a.get('href', None) in found:
|
|
||||||
continue
|
if 'article.cfm' in x['href']:
|
||||||
article = {
|
article = {
|
||||||
'url' : x['href'],
|
'url' : x['href'],
|
||||||
'title' : self.tag_to_string(x),
|
'title' : self.tag_to_string(x),
|
||||||
'date': '',
|
'date': '',
|
||||||
'description': '',
|
'description': '',
|
||||||
}
|
}
|
||||||
section.append(article)
|
|
||||||
|
section.append(article)
|
||||||
|
|
||||||
if section:
|
if section:
|
||||||
feeds.append((title, section))
|
feeds.append((title, section))
|
||||||
|
|
||||||
articles = []
|
|
||||||
for a in soup.find(id='opinion').findAll('a', href=True):
|
|
||||||
articles.append({'url':a['href'], 'title':self.tag_to_string(a),
|
|
||||||
'description':'', 'date':''})
|
|
||||||
feeds.append(('Opinion', articles))
|
|
||||||
|
|
||||||
return feeds
|
return feeds
|
||||||
|
|
||||||
|
|
||||||
@ -95,4 +108,5 @@ class ScientificAmerican(BasicNewsRecipe):
|
|||||||
div = soup.find('div', attrs={'class':'headline'})
|
div = soup.find('div', attrs={'class':'headline'})
|
||||||
if div:
|
if div:
|
||||||
div.extract()
|
div.extract()
|
||||||
|
|
||||||
return soup
|
return soup
|
||||||
|
@ -13,28 +13,30 @@ class USAToday(BasicNewsRecipe):
|
|||||||
|
|
||||||
title = 'USA Today'
|
title = 'USA Today'
|
||||||
timefmt = ' [%d %b %Y]'
|
timefmt = ' [%d %b %Y]'
|
||||||
|
__author__ = 'Kovid Goyal and Sujata Raman'
|
||||||
max_articles_per_feed = 20
|
max_articles_per_feed = 20
|
||||||
language = _('English')
|
language = _('English')
|
||||||
__author__ = _('Kovid Goyal and Sujata Raman')
|
|
||||||
|
|
||||||
no_stylesheets = True
|
no_stylesheets = True
|
||||||
extra_css = '''
|
extra_css = '''
|
||||||
.inside-head{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
.inside-head{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
||||||
.inside-head2{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
.inside-head2{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
||||||
.inside-head3{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
.inside-head3{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
||||||
h3{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold }
|
h3{font-family:Arial,Helvetica,sans-serif; font-size:large; font-weight:bold; }
|
||||||
h4{font-family:Arial,Helvetica,sans-serif; font-size:x-small; font-weight:bold }
|
h4{font-family:Arial,Helvetica,sans-serif; font-size:x-small; font-weight:bold; }
|
||||||
.side-by-side{font-family:Arial,Helvetica,sans-serif; font-size:x-small;}
|
.side-by-side{font-family:Arial,Helvetica,sans-serif; font-size:x-small;}
|
||||||
#byLineTag{font-family:Arial,Helvetica,sans-serif; font-size:xx-small;}
|
#byLineTag{font-family:Arial,Helvetica,sans-serif; font-size:xx-small;}
|
||||||
.inside-copy{font-family:Arial,Helvetica,sans-serif; font-size:x-small;text-align:left}
|
.inside-copy{font-family:Arial,Helvetica,sans-serif; font-size:x-small;text-align:left;}
|
||||||
.caption{font-family:Arial,Helvetica,sans-serif; font-size:x-small;}
|
.caption{font-family:Arial,Helvetica,sans-serif; font-size:x-small;}
|
||||||
|
li{font-family:Arial,Helvetica,sans-serif; font-size:x-small;text-align:left ;}
|
||||||
|
.vatext{font-family:Arial,Helvetica,sans-serif; font-size:x-small;text-align:left ;}
|
||||||
|
.vaTextBold{font-family:Arial,Helvetica,sans-serif; font-size:x-small;font-weight:bold; color:#666666;}
|
||||||
'''
|
'''
|
||||||
remove_tags = [
|
remove_tags = [
|
||||||
dict(name='div', attrs={'class':'inside-copy'}),
|
{'class':['tagListLabel','piped-taglist-string',]}
|
||||||
{'class':['tagListLabel','piped-taglist-string',]}
|
|
||||||
]
|
]
|
||||||
|
|
||||||
html2lrf_options = ['--ignore-tables']
|
conversion_options = { 'linearize_tables' : True }
|
||||||
|
|
||||||
preprocess_regexps = [
|
preprocess_regexps = [
|
||||||
(re.compile(r'<BODY.*?<!--Article Goes Here-->', re.IGNORECASE | re.DOTALL), lambda match : '<BODY>'),
|
(re.compile(r'<BODY.*?<!--Article Goes Here-->', re.IGNORECASE | re.DOTALL), lambda match : '<BODY>'),
|
||||||
|
@ -57,7 +57,7 @@ These API's are described in the CherryPy specification:
|
|||||||
http://www.cherrypy.org/wiki/CherryPySpec
|
http://www.cherrypy.org/wiki/CherryPySpec
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__version__ = "3.1.1"
|
__version__ = "3.1.2"
|
||||||
|
|
||||||
from urlparse import urljoin as _urljoin
|
from urlparse import urljoin as _urljoin
|
||||||
|
|
||||||
|
@ -39,7 +39,7 @@ class Checker(object):
|
|||||||
finally:
|
finally:
|
||||||
warnings.formatwarning = oldformatwarning
|
warnings.formatwarning = oldformatwarning
|
||||||
|
|
||||||
def formatwarning(self, message, category, filename, lineno):
|
def formatwarning(self, message, category, filename, lineno, line=None):
|
||||||
"""Function to format a warning."""
|
"""Function to format a warning."""
|
||||||
return "CherryPy Checker:\n%s\n\n" % message
|
return "CherryPy Checker:\n%s\n\n" % message
|
||||||
|
|
||||||
@ -58,7 +58,7 @@ class Checker(object):
|
|||||||
"specific sections. You must explicitly pass "
|
"specific sections. You must explicitly pass "
|
||||||
"application config via "
|
"application config via "
|
||||||
"cherrypy.tree.mount(..., config=app_config)")
|
"cherrypy.tree.mount(..., config=app_config)")
|
||||||
warnings.warn(msg[:5])
|
warnings.warn(msg)
|
||||||
return
|
return
|
||||||
|
|
||||||
def check_static_paths(self):
|
def check_static_paths(self):
|
||||||
|
@ -187,7 +187,9 @@ class HTTPError(CherryPyException):
         self.status = status = int(status)
         if status < 400 or status > 599:
             raise ValueError("status must be between 400 and 599.")
-        self.message = message
+        # See http://www.python.org/dev/peps/pep-0352/
+        # self.message = message
+        self._message = message
         CherryPyException.__init__(self, status, message)

     def set_response(self):
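For context, a small sketch of the Python behaviour this hunk works around; the class name is made up. Under Python 2.6, PEP 352 deprecates the .message attribute on exceptions, which is why the text is kept in a private attribute instead.

    class MyHTTPError(Exception):
        def __init__(self, status, message):
            # self.message = message   # triggers a DeprecationWarning on Python 2.6 (PEP 352)
            self._message = message    # private attribute sidesteps the deprecation
            Exception.__init__(self, status, message)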
@ -211,7 +213,7 @@ class HTTPError(CherryPyException):
|
|||||||
response.headers['Content-Type'] = "text/html"
|
response.headers['Content-Type'] = "text/html"
|
||||||
|
|
||||||
content = self.get_error_page(self.status, traceback=tb,
|
content = self.get_error_page(self.status, traceback=tb,
|
||||||
message=self.message)
|
message=self._message)
|
||||||
response.body = content
|
response.body = content
|
||||||
response.headers['Content-Length'] = len(content)
|
response.headers['Content-Length'] = len(content)
|
||||||
|
|
||||||
|
@ -646,7 +646,11 @@ class Request(object):
|
|||||||
# Handle cookies differently because on Konqueror, multiple
|
# Handle cookies differently because on Konqueror, multiple
|
||||||
# cookies come on different lines with the same key
|
# cookies come on different lines with the same key
|
||||||
if name == 'Cookie':
|
if name == 'Cookie':
|
||||||
self.cookie.load(value)
|
try:
|
||||||
|
self.cookie.load(value)
|
||||||
|
except Cookie.CookieError:
|
||||||
|
msg = "Illegal cookie name %s" % value.split('=')[0]
|
||||||
|
raise cherrypy.HTTPError(400, msg)
|
||||||
|
|
||||||
if not dict.__contains__(headers, 'Host'):
|
if not dict.__contains__(headers, 'Host'):
|
||||||
# All Internet-based HTTP/1.1 servers MUST respond with a 400
|
# All Internet-based HTTP/1.1 servers MUST respond with a 400
|
||||||
|
@ -17,7 +17,11 @@ class MemoryCache:
|
|||||||
self.clear()
|
self.clear()
|
||||||
t = threading.Thread(target=self.expire_cache, name='expire_cache')
|
t = threading.Thread(target=self.expire_cache, name='expire_cache')
|
||||||
self.expiration_thread = t
|
self.expiration_thread = t
|
||||||
t.setDaemon(True)
|
if hasattr(threading.Thread, "daemon"):
|
||||||
|
# Python 2.6+
|
||||||
|
t.daemon = True
|
||||||
|
else:
|
||||||
|
t.setDaemon(True)
|
||||||
t.start()
|
t.start()
|
||||||
|
|
||||||
def clear(self):
|
def clear(self):
|
||||||
|
@ -1,7 +1,13 @@
|
|||||||
"""Functions for builtin CherryPy tools."""
|
"""Functions for builtin CherryPy tools."""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import md5
|
|
||||||
|
try:
|
||||||
|
# Python 2.5+
|
||||||
|
from hashlib import md5
|
||||||
|
except ImportError:
|
||||||
|
from md5 import new as md5
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
@ -40,7 +46,7 @@ def validate_etags(autotags=False):
|
|||||||
if (not etag) and autotags:
|
if (not etag) and autotags:
|
||||||
if status == 200:
|
if status == 200:
|
||||||
etag = response.collapse_body()
|
etag = response.collapse_body()
|
||||||
etag = '"%s"' % md5.new(etag).hexdigest()
|
etag = '"%s"' % md5(etag).hexdigest()
|
||||||
response.headers['ETag'] = etag
|
response.headers['ETag'] = etag
|
||||||
|
|
||||||
response.ETag = etag
|
response.ETag = etag
|
||||||
|
@ -241,7 +241,7 @@ def gzip(compress_level=9, mime_types=['text/html', 'text/plain']):
|
|||||||
# to the client.
|
# to the client.
|
||||||
return
|
return
|
||||||
|
|
||||||
ct = response.headers.get('Content-Type').split(';')[0]
|
ct = response.headers.get('Content-Type', '').split(';')[0]
|
||||||
for coding in acceptable:
|
for coding in acceptable:
|
||||||
if coding.value == 'identity' and coding.qvalue != 0:
|
if coding.value == 'identity' and coding.qvalue != 0:
|
||||||
return
|
return
|
||||||
|
@ -59,7 +59,13 @@ __all__ = ("digestAuth", "basicAuth", "doAuth", "checkResponse",
|
|||||||
"calculateNonce", "SUPPORTED_QOP")
|
"calculateNonce", "SUPPORTED_QOP")
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
import md5
|
|
||||||
|
try:
|
||||||
|
# Python 2.5+
|
||||||
|
from hashlib import md5
|
||||||
|
except ImportError:
|
||||||
|
from md5 import new as md5
|
||||||
|
|
||||||
import time
|
import time
|
||||||
import base64
|
import base64
|
||||||
import urllib2
|
import urllib2
|
||||||
@ -76,9 +82,9 @@ SUPPORTED_QOP = (AUTH, AUTH_INT)
|
|||||||
# doAuth
|
# doAuth
|
||||||
#
|
#
|
||||||
DIGEST_AUTH_ENCODERS = {
|
DIGEST_AUTH_ENCODERS = {
|
||||||
MD5: lambda val: md5.new (val).hexdigest (),
|
MD5: lambda val: md5(val).hexdigest(),
|
||||||
MD5_SESS: lambda val: md5.new (val).hexdigest (),
|
MD5_SESS: lambda val: md5(val).hexdigest(),
|
||||||
# SHA: lambda val: sha.new (val).hexdigest (),
|
# SHA: lambda val: sha(val).hexdigest(),
|
||||||
}
|
}
|
||||||
|
|
||||||
def calculateNonce (realm, algorithm = MD5):
|
def calculateNonce (realm, algorithm = MD5):
|
||||||
|
@ -1,698 +0,0 @@
|
|||||||
"""Session implementation for CherryPy.
|
|
||||||
|
|
||||||
We use cherrypy.request to store some convenient variables as
|
|
||||||
well as data about the session for the current request. Instead of
|
|
||||||
polluting cherrypy.request we use a Session object bound to
|
|
||||||
cherrypy.session to store these variables.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import os
|
|
||||||
try:
|
|
||||||
import cPickle as pickle
|
|
||||||
except ImportError:
|
|
||||||
import pickle
|
|
||||||
import random
|
|
||||||
import sha
|
|
||||||
import time
|
|
||||||
import threading
|
|
||||||
import types
|
|
||||||
from warnings import warn
|
|
||||||
|
|
||||||
import cherrypy
|
|
||||||
from cherrypy.lib import http
|
|
||||||
|
|
||||||
|
|
||||||
missing = object()
|
|
||||||
|
|
||||||
class Session(object):
|
|
||||||
"""A CherryPy dict-like Session object (one per request)."""
|
|
||||||
|
|
||||||
__metaclass__ = cherrypy._AttributeDocstrings
|
|
||||||
|
|
||||||
_id = None
|
|
||||||
id_observers = None
|
|
||||||
id_observers__doc = "A list of callbacks to which to pass new id's."
|
|
||||||
|
|
||||||
id__doc = "The current session ID."
|
|
||||||
def _get_id(self):
|
|
||||||
return self._id
|
|
||||||
def _set_id(self, value):
|
|
||||||
self._id = value
|
|
||||||
for o in self.id_observers:
|
|
||||||
o(value)
|
|
||||||
id = property(_get_id, _set_id, doc=id__doc)
|
|
||||||
|
|
||||||
timeout = 60
|
|
||||||
timeout__doc = "Number of minutes after which to delete session data."
|
|
||||||
|
|
||||||
locked = False
|
|
||||||
locked__doc = """
|
|
||||||
If True, this session instance has exclusive read/write access
|
|
||||||
to session data."""
|
|
||||||
|
|
||||||
loaded = False
|
|
||||||
loaded__doc = """
|
|
||||||
If True, data has been retrieved from storage. This should happen
|
|
||||||
automatically on the first attempt to access session data."""
|
|
||||||
|
|
||||||
clean_thread = None
|
|
||||||
clean_thread__doc = "Class-level Monitor which calls self.clean_up."
|
|
||||||
|
|
||||||
clean_freq = 5
|
|
||||||
clean_freq__doc = "The poll rate for expired session cleanup in minutes."
|
|
||||||
|
|
||||||
def __init__(self, id=None, **kwargs):
|
|
||||||
self.id_observers = []
|
|
||||||
self._data = {}
|
|
||||||
|
|
||||||
for k, v in kwargs.iteritems():
|
|
||||||
setattr(self, k, v)
|
|
||||||
|
|
||||||
if id is None:
|
|
||||||
self.regenerate()
|
|
||||||
else:
|
|
||||||
self.id = id
|
|
||||||
if not self._exists():
|
|
||||||
# Expired or malicious session. Make a new one.
|
|
||||||
# See http://www.cherrypy.org/ticket/709.
|
|
||||||
self.id = None
|
|
||||||
self.regenerate()
|
|
||||||
|
|
||||||
def regenerate(self):
|
|
||||||
"""Replace the current session (with a new id)."""
|
|
||||||
if self.id is not None:
|
|
||||||
self.delete()
|
|
||||||
|
|
||||||
old_session_was_locked = self.locked
|
|
||||||
if old_session_was_locked:
|
|
||||||
self.release_lock()
|
|
||||||
|
|
||||||
self.id = None
|
|
||||||
while self.id is None:
|
|
||||||
self.id = self.generate_id()
|
|
||||||
# Assert that the generated id is not already stored.
|
|
||||||
if self._exists():
|
|
||||||
self.id = None
|
|
||||||
|
|
||||||
if old_session_was_locked:
|
|
||||||
self.acquire_lock()
|
|
||||||
|
|
||||||
def clean_up(self):
|
|
||||||
"""Clean up expired sessions."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.urandom(20)
|
|
||||||
except (AttributeError, NotImplementedError):
|
|
||||||
# os.urandom not available until Python 2.4. Fall back to random.random.
|
|
||||||
def generate_id(self):
|
|
||||||
"""Return a new session id."""
|
|
||||||
return sha.new('%s' % random.random()).hexdigest()
|
|
||||||
else:
|
|
||||||
def generate_id(self):
|
|
||||||
"""Return a new session id."""
|
|
||||||
return os.urandom(20).encode('hex')
|
|
||||||
|
|
||||||
def save(self):
|
|
||||||
"""Save session data."""
|
|
||||||
try:
|
|
||||||
# If session data has never been loaded then it's never been
|
|
||||||
# accessed: no need to delete it
|
|
||||||
if self.loaded:
|
|
||||||
t = datetime.timedelta(seconds = self.timeout * 60)
|
|
||||||
expiration_time = datetime.datetime.now() + t
|
|
||||||
self._save(expiration_time)
|
|
||||||
|
|
||||||
finally:
|
|
||||||
if self.locked:
|
|
||||||
# Always release the lock if the user didn't release it
|
|
||||||
self.release_lock()
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
"""Copy stored session data into this session instance."""
|
|
||||||
data = self._load()
|
|
||||||
# data is either None or a tuple (session_data, expiration_time)
|
|
||||||
if data is None or data[1] < datetime.datetime.now():
|
|
||||||
# Expired session: flush session data
|
|
||||||
self._data = {}
|
|
||||||
else:
|
|
||||||
self._data = data[0]
|
|
||||||
self.loaded = True
|
|
||||||
|
|
||||||
# Stick the clean_thread in the class, not the instance.
|
|
||||||
# The instances are created and destroyed per-request.
|
|
||||||
cls = self.__class__
|
|
||||||
if self.clean_freq and not cls.clean_thread:
|
|
||||||
# clean_up is in instancemethod and not a classmethod,
|
|
||||||
# so that tool config can be accessed inside the method.
|
|
||||||
t = cherrypy.process.plugins.Monitor(
|
|
||||||
cherrypy.engine, self.clean_up, self.clean_freq * 60)
|
|
||||||
t.subscribe()
|
|
||||||
cls.clean_thread = t
|
|
||||||
t.start()
|
|
||||||
|
|
||||||
def delete(self):
|
|
||||||
"""Delete stored session data."""
|
|
||||||
self._delete()
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data[key]
|
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
self._data[key] = value
|
|
||||||
|
|
||||||
def __delitem__(self, key):
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
del self._data[key]
|
|
||||||
|
|
||||||
def pop(self, key, default=missing):
|
|
||||||
"""Remove the specified key and return the corresponding value.
|
|
||||||
If key is not found, default is returned if given,
|
|
||||||
otherwise KeyError is raised.
|
|
||||||
"""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
if default is missing:
|
|
||||||
return self._data.pop(key)
|
|
||||||
else:
|
|
||||||
return self._data.pop(key, default)
|
|
||||||
|
|
||||||
def __contains__(self, key):
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return key in self._data
|
|
||||||
|
|
||||||
def has_key(self, key):
|
|
||||||
"""D.has_key(k) -> True if D has a key k, else False."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data.has_key(key)
|
|
||||||
|
|
||||||
def get(self, key, default=None):
|
|
||||||
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data.get(key, default)
|
|
||||||
|
|
||||||
def update(self, d):
|
|
||||||
"""D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
self._data.update(d)
|
|
||||||
|
|
||||||
def setdefault(self, key, default=None):
|
|
||||||
"""D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data.setdefault(key, default)
|
|
||||||
|
|
||||||
def clear(self):
|
|
||||||
"""D.clear() -> None. Remove all items from D."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
self._data.clear()
|
|
||||||
|
|
||||||
def keys(self):
|
|
||||||
"""D.keys() -> list of D's keys."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data.keys()
|
|
||||||
|
|
||||||
def items(self):
|
|
||||||
"""D.items() -> list of D's (key, value) pairs, as 2-tuples."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data.items()
|
|
||||||
|
|
||||||
def values(self):
|
|
||||||
"""D.values() -> list of D's values."""
|
|
||||||
if not self.loaded: self.load()
|
|
||||||
return self._data.values()
|
|
||||||
|
|
||||||
|
|
||||||
class RamSession(Session):
|
|
||||||
|
|
||||||
# Class-level objects. Don't rebind these!
|
|
||||||
cache = {}
|
|
||||||
locks = {}
|
|
||||||
|
|
||||||
def clean_up(self):
|
|
||||||
"""Clean up expired sessions."""
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
for id, (data, expiration_time) in self.cache.items():
|
|
||||||
if expiration_time < now:
|
|
||||||
try:
|
|
||||||
del self.cache[id]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
try:
|
|
||||||
del self.locks[id]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _exists(self):
|
|
||||||
return self.id in self.cache
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
return self.cache.get(self.id)
|
|
||||||
|
|
||||||
def _save(self, expiration_time):
|
|
||||||
self.cache[self.id] = (self._data, expiration_time)
|
|
||||||
|
|
||||||
def _delete(self):
|
|
||||||
del self.cache[self.id]
|
|
||||||
|
|
||||||
def acquire_lock(self):
|
|
||||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
|
||||||
self.locked = True
|
|
||||||
self.locks.setdefault(self.id, threading.RLock()).acquire()
|
|
||||||
|
|
||||||
def release_lock(self):
|
|
||||||
"""Release the lock on the currently-loaded session data."""
|
|
||||||
self.locks[self.id].release()
|
|
||||||
self.locked = False
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
"""Return the number of active sessions."""
|
|
||||||
return len(self.cache)
|
|
||||||
|
|
||||||
|
|
||||||
class FileSession(Session):
|
|
||||||
"""Implementation of the File backend for sessions
|
|
||||||
|
|
||||||
storage_path: the folder where session data will be saved. Each session
|
|
||||||
will be saved as pickle.dump(data, expiration_time) in its own file;
|
|
||||||
the filename will be self.SESSION_PREFIX + self.id.
|
|
||||||
"""
|
|
||||||
|
|
||||||
SESSION_PREFIX = 'session-'
|
|
||||||
LOCK_SUFFIX = '.lock'
|
|
||||||
|
|
||||||
def __init__(self, id=None, **kwargs):
|
|
||||||
# The 'storage_path' arg is required for file-based sessions.
|
|
||||||
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
|
|
||||||
Session.__init__(self, id=id, **kwargs)
|
|
||||||
|
|
||||||
def setup(cls, **kwargs):
|
|
||||||
"""Set up the storage system for file-based sessions.
|
|
||||||
|
|
||||||
This should only be called once per process; this will be done
|
|
||||||
automatically when using sessions.init (as the built-in Tool does).
|
|
||||||
"""
|
|
||||||
# The 'storage_path' arg is required for file-based sessions.
|
|
||||||
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
|
|
||||||
|
|
||||||
for k, v in kwargs.iteritems():
|
|
||||||
setattr(cls, k, v)
|
|
||||||
|
|
||||||
# Warn if any lock files exist at startup.
|
|
||||||
lockfiles = [fname for fname in os.listdir(cls.storage_path)
|
|
||||||
if (fname.startswith(cls.SESSION_PREFIX)
|
|
||||||
and fname.endswith(cls.LOCK_SUFFIX))]
|
|
||||||
if lockfiles:
|
|
||||||
plural = ('', 's')[len(lockfiles) > 1]
|
|
||||||
warn("%s session lockfile%s found at startup. If you are "
|
|
||||||
"only running one process, then you may need to "
|
|
||||||
"manually delete the lockfiles found at %r."
|
|
||||||
% (len(lockfiles), plural, cls.storage_path))
|
|
||||||
setup = classmethod(setup)
|
|
||||||
|
|
||||||
def _get_file_path(self):
|
|
||||||
f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id)
|
|
||||||
if not os.path.abspath(f).startswith(self.storage_path):
|
|
||||||
raise cherrypy.HTTPError(400, "Invalid session id in cookie.")
|
|
||||||
return f
|
|
||||||
|
|
||||||
def _exists(self):
|
|
||||||
path = self._get_file_path()
|
|
||||||
return os.path.exists(path)
|
|
||||||
|
|
||||||
def _load(self, path=None):
|
|
||||||
if path is None:
|
|
||||||
path = self._get_file_path()
|
|
||||||
try:
|
|
||||||
f = open(path, "rb")
|
|
||||||
try:
|
|
||||||
return pickle.load(f)
|
|
||||||
finally:
|
|
||||||
f.close()
|
|
||||||
except (IOError, EOFError):
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _save(self, expiration_time):
|
|
||||||
f = open(self._get_file_path(), "wb")
|
|
||||||
try:
|
|
||||||
pickle.dump((self._data, expiration_time), f)
|
|
||||||
finally:
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
def _delete(self):
|
|
||||||
try:
|
|
||||||
os.unlink(self._get_file_path())
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def acquire_lock(self, path=None):
|
|
||||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
|
||||||
if path is None:
|
|
||||||
path = self._get_file_path()
|
|
||||||
path += self.LOCK_SUFFIX
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
lockfd = os.open(path, os.O_CREAT|os.O_WRONLY|os.O_EXCL)
|
|
||||||
except OSError:
|
|
||||||
time.sleep(0.1)
|
|
||||||
else:
|
|
||||||
os.close(lockfd)
|
|
||||||
break
|
|
||||||
self.locked = True
|
|
||||||
|
|
||||||
def release_lock(self, path=None):
|
|
||||||
"""Release the lock on the currently-loaded session data."""
|
|
||||||
if path is None:
|
|
||||||
path = self._get_file_path()
|
|
||||||
os.unlink(path + self.LOCK_SUFFIX)
|
|
||||||
self.locked = False
|
|
||||||
|
|
||||||
def clean_up(self):
|
|
||||||
"""Clean up expired sessions."""
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
# Iterate over all session files in self.storage_path
|
|
||||||
for fname in os.listdir(self.storage_path):
|
|
||||||
if (fname.startswith(self.SESSION_PREFIX)
|
|
||||||
and not fname.endswith(self.LOCK_SUFFIX)):
|
|
||||||
# We have a session file: lock and load it and check
|
|
||||||
# if it's expired. If it fails, nevermind.
|
|
||||||
path = os.path.join(self.storage_path, fname)
|
|
||||||
self.acquire_lock(path)
|
|
||||||
try:
|
|
||||||
contents = self._load(path)
|
|
||||||
# _load returns None on IOError
|
|
||||||
if contents is not None:
|
|
||||||
data, expiration_time = contents
|
|
||||||
if expiration_time < now:
|
|
||||||
# Session expired: deleting it
|
|
||||||
os.unlink(path)
|
|
||||||
finally:
|
|
||||||
self.release_lock(path)
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
"""Return the number of active sessions."""
|
|
||||||
return len([fname for fname in os.listdir(self.storage_path)
|
|
||||||
if (fname.startswith(self.SESSION_PREFIX)
|
|
||||||
and not fname.endswith(self.LOCK_SUFFIX))])
|
|
||||||
|
|
||||||
|
|
||||||
class PostgresqlSession(Session):
|
|
||||||
""" Implementation of the PostgreSQL backend for sessions. It assumes
|
|
||||||
a table like this:
|
|
||||||
|
|
||||||
create table session (
|
|
||||||
id varchar(40),
|
|
||||||
data text,
|
|
||||||
expiration_time timestamp
|
|
||||||
)
|
|
||||||
|
|
||||||
You must provide your own get_db function.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, id=None, **kwargs):
|
|
||||||
Session.__init__(self, id, **kwargs)
|
|
||||||
self.cursor = self.db.cursor()
|
|
||||||
|
|
||||||
def setup(cls, **kwargs):
|
|
||||||
"""Set up the storage system for Postgres-based sessions.
|
|
||||||
|
|
||||||
This should only be called once per process; this will be done
|
|
||||||
automatically when using sessions.init (as the built-in Tool does).
|
|
||||||
"""
|
|
||||||
for k, v in kwargs.iteritems():
|
|
||||||
setattr(cls, k, v)
|
|
||||||
|
|
||||||
self.db = self.get_db()
|
|
||||||
setup = classmethod(setup)
|
|
||||||
|
|
||||||
def __del__(self):
|
|
||||||
if self.cursor:
|
|
||||||
self.cursor.close()
|
|
||||||
self.db.commit()
|
|
||||||
|
|
||||||
def _exists(self):
|
|
||||||
# Select session data from table
|
|
||||||
self.cursor.execute('select data, expiration_time from session '
|
|
||||||
'where id=%s', (self.id,))
|
|
||||||
rows = self.cursor.fetchall()
|
|
||||||
return bool(rows)
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
# Select session data from table
|
|
||||||
self.cursor.execute('select data, expiration_time from session '
|
|
||||||
'where id=%s', (self.id,))
|
|
||||||
rows = self.cursor.fetchall()
|
|
||||||
if not rows:
|
|
||||||
return None
|
|
||||||
|
|
||||||
pickled_data, expiration_time = rows[0]
|
|
||||||
data = pickle.loads(pickled_data)
|
|
||||||
return data, expiration_time
|
|
||||||
|
|
||||||
def _save(self, expiration_time):
|
|
||||||
pickled_data = pickle.dumps(self._data)
|
|
||||||
self.cursor.execute('update session set data = %s, '
|
|
||||||
'expiration_time = %s where id = %s',
|
|
||||||
(pickled_data, expiration_time, self.id))
|
|
||||||
|
|
||||||
def _delete(self):
|
|
||||||
self.cursor.execute('delete from session where id=%s', (self.id,))
|
|
||||||
|
|
||||||
def acquire_lock(self):
|
|
||||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
|
||||||
# We use the "for update" clause to lock the row
|
|
||||||
self.locked = True
|
|
||||||
self.cursor.execute('select id from session where id=%s for update',
|
|
||||||
(self.id,))
|
|
||||||
|
|
||||||
def release_lock(self):
|
|
||||||
"""Release the lock on the currently-loaded session data."""
|
|
||||||
# We just close the cursor and that will remove the lock
|
|
||||||
# introduced by the "for update" clause
|
|
||||||
self.cursor.close()
|
|
||||||
self.locked = False
|
|
||||||
|
|
||||||
def clean_up(self):
|
|
||||||
"""Clean up expired sessions."""
|
|
||||||
self.cursor.execute('delete from session where expiration_time < %s',
|
|
||||||
(datetime.datetime.now(),))
|
|
||||||
|
|
||||||
|
|
||||||
class MemcachedSession(Session):
|
|
||||||
|
|
||||||
# The most popular memcached client for Python isn't thread-safe.
|
|
||||||
# Wrap all .get and .set operations in a single lock.
|
|
||||||
mc_lock = threading.RLock()
|
|
||||||
|
|
||||||
# This is a seperate set of locks per session id.
|
|
||||||
locks = {}
|
|
||||||
|
|
||||||
servers = ['127.0.0.1:11211']
|
|
||||||
|
|
||||||
def setup(cls, **kwargs):
|
|
||||||
"""Set up the storage system for memcached-based sessions.
|
|
||||||
|
|
||||||
This should only be called once per process; this will be done
|
|
||||||
automatically when using sessions.init (as the built-in Tool does).
|
|
||||||
"""
|
|
||||||
for k, v in kwargs.iteritems():
|
|
||||||
setattr(cls, k, v)
|
|
||||||
|
|
||||||
import memcache
|
|
||||||
cls.cache = memcache.Client(cls.servers)
|
|
||||||
setup = classmethod(setup)
|
|
||||||
|
|
||||||
def _exists(self):
|
|
||||||
self.mc_lock.acquire()
|
|
||||||
try:
|
|
||||||
return bool(self.cache.get(self.id))
|
|
||||||
finally:
|
|
||||||
self.mc_lock.release()
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
self.mc_lock.acquire()
|
|
||||||
try:
|
|
||||||
return self.cache.get(self.id)
|
|
||||||
finally:
|
|
||||||
self.mc_lock.release()
|
|
||||||
|
|
||||||
def _save(self, expiration_time):
|
|
||||||
# Send the expiration time as "Unix time" (seconds since 1/1/1970)
|
|
||||||
td = int(time.mktime(expiration_time.timetuple()))
|
|
||||||
self.mc_lock.acquire()
|
|
||||||
try:
|
|
||||||
if not self.cache.set(self.id, (self._data, expiration_time), td):
|
|
||||||
raise AssertionError("Session data for id %r not set." % self.id)
|
|
||||||
finally:
|
|
||||||
self.mc_lock.release()
|
|
||||||
|
|
||||||
def _delete(self):
|
|
||||||
self.cache.delete(self.id)
|
|
||||||
|
|
||||||
def acquire_lock(self):
|
|
||||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
|
||||||
self.locked = True
|
|
||||||
self.locks.setdefault(self.id, threading.RLock()).acquire()
|
|
||||||
|
|
||||||
def release_lock(self):
|
|
||||||
"""Release the lock on the currently-loaded session data."""
|
|
||||||
self.locks[self.id].release()
|
|
||||||
self.locked = False
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
"""Return the number of active sessions."""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
|
|
||||||
# Hook functions (for CherryPy tools)

def save():
    """Save any changed session data."""

    if not hasattr(cherrypy.serving, "session"):
        return

    # Guard against running twice
    if hasattr(cherrypy.request, "_sessionsaved"):
        return
    cherrypy.request._sessionsaved = True

    if cherrypy.response.stream:
        # If the body is being streamed, we have to save the data
        # *after* the response has been written out
        cherrypy.request.hooks.attach('on_end_request', cherrypy.session.save)
    else:
        # If the body is not being streamed, we save the data now
        # (so we can release the lock).
        if isinstance(cherrypy.response.body, types.GeneratorType):
            cherrypy.response.collapse_body()
        cherrypy.session.save()
save.failsafe = True

def close():
    """Close the session object for this request."""
    sess = getattr(cherrypy.serving, "session", None)
    if getattr(sess, "locked", False):
        # If the session is still locked we release the lock
        sess.release_lock()
close.failsafe = True
close.priority = 90


def init(storage_type='ram', path=None, path_header=None, name='session_id',
         timeout=60, domain=None, secure=False, clean_freq=5,
         persistent=True, **kwargs):
    """Initialize session object (using cookies).

    storage_type: one of 'ram', 'file', 'postgresql'. This will be used
        to look up the corresponding class in cherrypy.lib.sessions
        globals. For example, 'file' will use the FileSession class.
    path: the 'path' value to stick in the response cookie metadata.
    path_header: if 'path' is None (the default), then the response
        cookie 'path' will be pulled from request.headers[path_header].
    name: the name of the cookie.
    timeout: the expiration timeout (in minutes) for the stored session data.
        If 'persistent' is True (the default), this is also the timeout
        for the cookie.
    domain: the cookie domain.
    secure: if False (the default) the cookie 'secure' value will not
        be set. If True, the cookie 'secure' value will be set (to 1).
    clean_freq (minutes): the poll rate for expired session cleanup.
    persistent: if True (the default), the 'timeout' argument will be used
        to expire the cookie. If False, the cookie will not have an expiry,
        and the cookie will be a "session cookie" which expires when the
        browser is closed.

    Any additional kwargs will be bound to the new Session instance,
    and may be specific to the storage type. See the subclass of Session
    you're using for more information.
    """

    request = cherrypy.request

    # Guard against running twice
    if hasattr(request, "_session_init_flag"):
        return
    request._session_init_flag = True

    # Check if request came with a session ID
    id = None
    if name in request.cookie:
        id = request.cookie[name].value

    # Find the storage class and call setup (first time only).
    storage_class = storage_type.title() + 'Session'
    storage_class = globals()[storage_class]
    if not hasattr(cherrypy, "session"):
        if hasattr(storage_class, "setup"):
            storage_class.setup(**kwargs)

    # Create and attach a new Session instance to cherrypy.serving.
    # It will possess a reference to (and lock, and lazily load)
    # the requested session data.
    kwargs['timeout'] = timeout
    kwargs['clean_freq'] = clean_freq
    cherrypy.serving.session = sess = storage_class(id, **kwargs)
    def update_cookie(id):
        """Update the cookie every time the session id changes."""
        cherrypy.response.cookie[name] = id
    sess.id_observers.append(update_cookie)

    # Create cherrypy.session which will proxy to cherrypy.serving.session
    if not hasattr(cherrypy, "session"):
        cherrypy.session = cherrypy._ThreadLocalProxy('session')

    if persistent:
        cookie_timeout = timeout
    else:
        # See http://support.microsoft.com/kb/223799/EN-US/
        # and http://support.mozilla.com/en-US/kb/Cookies
        cookie_timeout = None
    set_response_cookie(path=path, path_header=path_header, name=name,
                        timeout=cookie_timeout, domain=domain, secure=secure)


def set_response_cookie(path=None, path_header=None, name='session_id',
                        timeout=60, domain=None, secure=False):
    """Set a response cookie for the client.

    path: the 'path' value to stick in the response cookie metadata.
    path_header: if 'path' is None (the default), then the response
        cookie 'path' will be pulled from request.headers[path_header].
    name: the name of the cookie.
    timeout: the expiration timeout for the cookie. If 0 or other boolean
        False, no 'expires' param will be set, and the cookie will be a
        "session cookie" which expires when the browser is closed.
    domain: the cookie domain.
    secure: if False (the default) the cookie 'secure' value will not
        be set. If True, the cookie 'secure' value will be set (to 1).
    """
    # Set response cookie
    cookie = cherrypy.response.cookie
    cookie[name] = cherrypy.serving.session.id
    cookie[name]['path'] = (path or cherrypy.request.headers.get(path_header)
                            or '/')

    # We'd like to use the "max-age" param as indicated in
    # http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
    # save it to disk and the session is lost if people close
    # the browser. So we have to use the old "expires" ... sigh ...
    ## cookie[name]['max-age'] = timeout * 60
    if timeout:
        cookie[name]['expires'] = http.HTTPDate(time.time() + (timeout * 60))
    if domain is not None:
        cookie[name]['domain'] = domain
    if secure:
        cookie[name]['secure'] = 1


def expire():
    """Expire the current session cookie."""
    name = cherrypy.request.config.get('tools.sessions.name', 'session_id')
    one_year = 60 * 60 * 24 * 365
    exp = time.gmtime(time.time() - one_year)
    t = time.strftime("%a, %d-%b-%Y %H:%M:%S GMT", exp)
    cherrypy.response.cookie[name]['expires'] = t

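Note (not part of this commit): because init() resolves the storage class as storage_type.title() + 'Session', the MemcachedSession class above is selected with a storage_type of 'memcached'. A minimal sketch of the corresponding tool configuration, assuming the standard tools.sessions.* keys; the memcached address and the extra 'servers' key are illustrative and are simply passed through **kwargs to MemcachedSession.setup():

import cherrypy

# Sketch only: enable cookie-based sessions backed by memcached.
conf = {
    '/': {
        'tools.sessions.on': True,
        'tools.sessions.storage_type': 'memcached',
        'tools.sessions.servers': ['127.0.0.1:11211'],  # illustrative address
        'tools.sessions.timeout': 60,                   # minutes
    }
}

class Root(object):
    def index(self):
        # Each request under this config sees the same session dict.
        cherrypy.session['visits'] = cherrypy.session.get('visits', 0) + 1
        return 'visits this session: %d' % cherrypy.session['visits']
    index.exposed = True

if __name__ == '__main__':
    cherrypy.quickstart(Root(), '/', config=conf)
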
@ -13,7 +13,13 @@ try:
 except ImportError:
     import pickle
 import random
-import sha
+try:
+    # Python 2.5+
+    from hashlib import sha1 as sha
+except ImportError:
+    from sha import new as sha
+
 import time
 import threading
 import types
@ -108,7 +114,7 @@ class Session(object):
     # os.urandom not available until Python 2.4. Fall back to random.random.
     def generate_id(self):
         """Return a new session id."""
-        return sha.new('%s' % random.random()).hexdigest()
+        return sha('%s' % random.random()).hexdigest()
 else:
     def generate_id(self):
         """Return a new session id."""
@ -167,6 +167,9 @@ def staticdir(section, dir, root="", match="", content_types=None, index=""):
     '/home/me', the Request-URI is 'myapp', and the index arg is
     'index.html', the file '/home/me/myapp/index.html' will be sought.
     """
+    if cherrypy.request.method not in ('GET', 'HEAD'):
+        return False
+
     if match and not re.search(match, cherrypy.request.path_info):
         return False

@ -217,6 +220,9 @@ def staticfile(filename, root=None, match="", content_types=None):
     a string (e.g. "gif") and 'content-type' is the value to write
     out in the Content-Type response header (e.g. "image/gif").
     """
+    if cherrypy.request.method not in ('GET', 'HEAD'):
+        return False
+
     if match and not re.search(match, cherrypy.request.path_info):
         return False

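The two hunks above make staticdir and staticfile decline anything but GET and HEAD by returning False, so the request falls through to the next handler instead of being answered from disk. An illustrative staticdir configuration (paths are placeholders, not taken from this commit):

# Sketch only: with the guard above, a POST to /static is no longer served
# by the static handler; the tool returns False and CherryPy continues
# dispatching as if the static tool were not hooked in.
conf = {
    '/static': {
        'tools.staticdir.on': True,
        'tools.staticdir.root': '/home/me',  # placeholder
        'tools.staticdir.dir': 'myapp',      # placeholder
    }
}
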
@ -5,33 +5,33 @@ import time

 class ServerAdapter(object):
     """Adapter for an HTTP server.

     If you need to start more than one HTTP server (to serve on multiple
     ports, or protocols, etc.), you can manually register each one and then
     start them all with bus.start:

         s1 = ServerAdapter(bus, MyWSGIServer(host='0.0.0.0', port=80))
         s2 = ServerAdapter(bus, another.HTTPServer(host='127.0.0.1', SSL=True))
         s1.subscribe()
         s2.subscribe()
         bus.start()
     """

     def __init__(self, bus, httpserver=None, bind_addr=None):
         self.bus = bus
         self.httpserver = httpserver
         self.bind_addr = bind_addr
         self.interrupt = None
         self.running = False

     def subscribe(self):
         self.bus.subscribe('start', self.start)
         self.bus.subscribe('stop', self.stop)

     def unsubscribe(self):
         self.bus.unsubscribe('start', self.start)
         self.bus.unsubscribe('stop', self.stop)

     def start(self):
         """Start the HTTP server."""
         if isinstance(self.bind_addr, tuple):
@ -39,29 +39,29 @@ class ServerAdapter(object):
             on_what = "%s:%s" % (host, port)
         else:
             on_what = "socket file: %s" % self.bind_addr

         if self.running:
             self.bus.log("Already serving on %s" % on_what)
             return

         self.interrupt = None
         if not self.httpserver:
             raise ValueError("No HTTP server has been created.")

         # Start the httpserver in a new thread.
         if isinstance(self.bind_addr, tuple):
             wait_for_free_port(*self.bind_addr)

         import threading
         t = threading.Thread(target=self._start_http_thread)
         t.setName("HTTPServer " + t.getName())
         t.start()

         self.wait()
         self.running = True
         self.bus.log("Serving on %s" % on_what)
     start.priority = 75

     def _start_http_thread(self):
         """HTTP servers MUST be running in new threads, so that the
         main thread persists to receive KeyboardInterrupt's. If an
@ -87,19 +87,19 @@ class ServerAdapter(object):
                          traceback=True, level=40)
             self.bus.exit()
             raise

     def wait(self):
         """Wait until the HTTP server is ready to receive requests."""
         while not getattr(self.httpserver, "ready", False):
             if self.interrupt:
                 raise self.interrupt
             time.sleep(.1)

         # Wait for port to be occupied
         if isinstance(self.bind_addr, tuple):
             host, port = self.bind_addr
             wait_for_occupied_port(host, port)

     def stop(self):
         """Stop the HTTP server."""
         if self.running:
@ -113,7 +113,7 @@ class ServerAdapter(object):
         else:
             self.bus.log("HTTP Server %s already shut down" % self.httpserver)
         stop.priority = 25

     def restart(self):
         """Restart the HTTP server."""
         self.stop()
@ -122,12 +122,12 @@ class ServerAdapter(object):

 class FlupFCGIServer(object):
     """Adapter for a flup.server.fcgi.WSGIServer."""

     def __init__(self, *args, **kwargs):
         self.args = args
         self.kwargs = kwargs
         self.ready = False

     def start(self):
         """Start the FCGI server."""
         # We have to instantiate the server class here because its __init__
@ -147,24 +147,24 @@ class FlupFCGIServer(object):
         self.fcgiserver._oldSIGs = []
         self.ready = True
         self.fcgiserver.run()

     def stop(self):
         """Stop the HTTP server."""
-        self.ready = False
         # Forcibly stop the fcgi server main event loop.
         self.fcgiserver._keepGoing = False
         # Force all worker threads to die off.
-        self.fcgiserver._threadPool.maxSpare = 0
+        self.fcgiserver._threadPool.maxSpare = self.fcgiserver._threadPool._idleCount
+        self.ready = False


 class FlupSCGIServer(object):
     """Adapter for a flup.server.scgi.WSGIServer."""

     def __init__(self, *args, **kwargs):
         self.args = args
         self.kwargs = kwargs
         self.ready = False

     def start(self):
         """Start the SCGI server."""
         # We have to instantiate the server class here because its __init__
@ -184,7 +184,7 @@ class FlupSCGIServer(object):
         self.scgiserver._oldSIGs = []
         self.ready = True
         self.scgiserver.run()

     def stop(self):
         """Stop the HTTP server."""
         self.ready = False
@ -210,9 +210,9 @@ def check_port(host, port, timeout=1.0):
         raise ValueError("Host values of '' or None are not allowed.")
     host = client_host(host)
     port = int(port)

     import socket

     # AF_INET or AF_INET6 socket
     # Get the correct address family for our host (allows IPv6 addresses)
     for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
@ -237,24 +237,24 @@ def wait_for_free_port(host, port):
     """Wait for the specified port to become free (drop requests)."""
     if not host:
         raise ValueError("Host values of '' or None are not allowed.")

     for trial in xrange(50):
         try:
             # we are expecting a free port, so reduce the timeout
-            check_port(host, port, timeout=0.1)
+            check_port(host, port, timeout=0.2)
         except IOError:
             # Give the old server thread time to free the port.
-            time.sleep(0.1)
+            time.sleep(0.2)
         else:
             return

     raise IOError("Port %r not free on %r" % (port, host))

 def wait_for_occupied_port(host, port):
     """Wait for the specified port to become active (receive requests)."""
     if not host:
         raise ValueError("Host values of '' or None are not allowed.")

     for trial in xrange(50):
         try:
             check_port(host, port)
@ -262,5 +262,5 @@ def wait_for_occupied_port(host, port):
             return
         else:
             time.sleep(.1)

     raise IOError("Port %r not bound on %r" % (port, host))
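For context, a minimal sketch (not from this diff) of putting the flup adapter above behind the engine; the import path follows CherryPy 3.1's cherrypy.process.servers module and the bind address is an assumption:

import cherrypy
from cherrypy.process.servers import ServerAdapter, FlupFCGIServer

cherrypy.server.unsubscribe()  # do not start the built-in HTTP server
fcgi = FlupFCGIServer(application=cherrypy.tree,
                      bindAddress=('127.0.0.1', 4000))  # assumed address
ServerAdapter(cherrypy.engine, httpserver=fcgi,
              bind_addr=('127.0.0.1', 4000)).subscribe()
cherrypy.engine.start()
cherrypy.engine.block()
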
@ -199,14 +199,21 @@ class Bus(object):

     def exit(self):
         """Stop all services and prepare to exit the process."""
-        self.stop()
-        self.state = states.EXITING
-        self.log('Bus EXITING')
-        self.publish('exit')
-        # This isn't strictly necessary, but it's better than seeing
-        # "Waiting for child threads to terminate..." and then nothing.
-        self.log('Bus EXITED')
+        try:
+            self.stop()
+            self.state = states.EXITING
+            self.log('Bus EXITING')
+            self.publish('exit')
+            # This isn't strictly necessary, but it's better than seeing
+            # "Waiting for child threads to terminate..." and then nothing.
+            self.log('Bus EXITED')
+        except:
+            # This method is often called asynchronously (whether thread,
+            # signal handler, console handler, or atexit handler), so we
+            # can't just let exceptions propagate out unhandled.
+            # Assume it's been logged and just die.
+            os._exit(70) # EX_SOFTWARE

     def restart(self):
         """Restart the process (may close connections).
@ -223,7 +230,14 @@ class Bus(object):
         self.publish('graceful')

     def block(self, interval=0.1):
-        """Wait for the EXITING state, KeyboardInterrupt or SystemExit."""
+        """Wait for the EXITING state, KeyboardInterrupt or SystemExit.
+
+        This function is intended to be called only by the main thread.
+        After waiting for the EXITING state, it also waits for all threads
+        to terminate, and then calls os.execv if self.execv is True. This
+        design allows another thread to call bus.restart, yet have the main
+        thread perform the actual execv call (required on some platforms).
+        """
         try:
             self.wait(states.EXITING, interval=interval)
         except (KeyboardInterrupt, IOError):
@ -243,10 +257,15 @@ class Bus(object):
             # See http://www.cherrypy.org/ticket/751.
             self.log("Waiting for child threads to terminate...")
             for t in threading.enumerate():
-                if (t != threading.currentThread() and t.isAlive()
+                if t != threading.currentThread() and t.isAlive():
                     # Note that any dummy (external) threads are always daemonic.
-                    and not t.isDaemon()):
-                    t.join()
+                    if hasattr(threading.Thread, "daemon"):
+                        # Python 2.6+
+                        d = t.daemon
+                    else:
+                        d = t.isDaemon()
+                    if not d:
+                        t.join()

         if self.execv:
             self._do_execv()
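The last hunk above drops the bare isDaemon() test in favour of a feature check, so the bus works with both the pre-2.6 thread API and the Python 2.6 daemon attribute. The same logic as a standalone helper, for illustration only:

import threading

def thread_is_daemon(t):
    # Mirrors the compatibility check added to Bus.block() above:
    # Python 2.6+ exposes Thread.daemon, older versions only isDaemon().
    if hasattr(threading.Thread, 'daemon'):
        return t.daemon
    return t.isDaemon()
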
@ -1191,28 +1191,39 @@ class HTTPConnection(object):
                 # Close the connection.
                 return
             except NoSSLError:
-                # Unwrap our wfile
-                req.wfile = CP_fileobject(self.socket, "wb", -1)
                 if req and not req.sent_headers:
+                    # Unwrap our wfile
+                    req.wfile = CP_fileobject(self.socket._sock, "wb", -1)
                     req.simple_response("400 Bad Request",
                         "The client sent a plain HTTP request, but "
                         "this server only speaks HTTPS on this port.")
+                    self.linger = True
             except Exception, e:
                 if req and not req.sent_headers:
                     req.simple_response("500 Internal Server Error", format_exc())

+    linger = False
+
     def close(self):
         """Close the socket underlying this connection."""
         self.rfile.close()

-        # Python's socket module does NOT call close on the kernel socket
-        # when you call socket.close(). We do so manually here because we
-        # want this server to send a FIN TCP segment immediately. Note this
-        # must be called *before* calling socket.close(), because the latter
-        # drops its reference to the kernel socket.
-        self.socket._sock.close()
-        self.socket.close()
+        if not self.linger:
+            # Python's socket module does NOT call close on the kernel socket
+            # when you call socket.close(). We do so manually here because we
+            # want this server to send a FIN TCP segment immediately. Note this
+            # must be called *before* calling socket.close(), because the latter
+            # drops its reference to the kernel socket.
+            self.socket._sock.close()
+            self.socket.close()
+        else:
+            # On the other hand, sometimes we want to hang around for a bit
+            # to make sure the client has a chance to read our entire
+            # response. Skipping the close() calls here delays the FIN
+            # packet until the socket object is garbage-collected later.
+            # Someday, perhaps, we'll do the full lingering_close that
+            # Apache does, but not today.
+            pass


 def format_exc(limit=None):
@ -1457,7 +1468,7 @@ class CherryPyWSGIServer(object):

     protocol = "HTTP/1.1"
     _bind_addr = "127.0.0.1"
-    version = "CherryPy/3.1.1"
+    version = "CherryPy/3.1.2"
     ready = False
     _interrupt = None

@ -1709,7 +1720,7 @@ class CherryPyWSGIServer(object):
         try:
             host, port = sock.getsockname()[:2]
         except socket.error, x:
-            if x.args[1] != "Bad file descriptor":
+            if x.args[0] not in socket_errors_to_ignore:
                 raise
         else:
             # Note that we're explicitly NOT using AI_PASSIVE,
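The final hunk replaces a comparison against the error message text with a membership test on wsgiserver's socket_errors_to_ignore, i.e. it keys on errno codes instead of strings. A rough sketch of how such a set can be assembled portably (the exact error names used by wsgiserver may differ; this is illustrative):

import errno

def plat_specific_errors(*names):
    # Collect only the errno values that exist on this platform.
    return set(getattr(errno, name) for name in names if hasattr(errno, name))

socket_errors_to_ignore = plat_specific_errors(
    'EPIPE', 'EBADF', 'ENOTCONN', 'ESHUTDOWN', 'ECONNRESET')
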
upload.py
@ -36,7 +36,7 @@ def get_ip_address(ifname):
     )[20:24])

 try:
-    HOST=get_ip_address('br0')
+    HOST=get_ip_address('eth0')
 except:
     try:
         HOST=get_ip_address('wlan0')
@ -506,6 +506,7 @@ class VMInstaller(OptionlessCommand):

     user_options = [('dont-shutdown', 'd', 'Dont shutdown VM after build')]
     boolean_options = ['dont-shutdown']
+    EXTRA_SLEEP = 5

     def initialize_options(self):
         self.dont_shutdown = False
@ -546,12 +547,10 @@ class VMInstaller(OptionlessCommand):
                 pass
         while 'vmblock' in open('/proc/modules').read():
             check_call('sudo rmmod -f vmblock')
-        check_call('sudo modprobe kvm-intel', shell=True)


     def run_vm(self):
-        vmware = ('vmware', '-q', '-x', '-n', self.VM)
-        self.__p = Popen(vmware)
+        self.__p = Popen(self.VM)

     def start_vm(self, ssh_host, build_script, sleep=75):
         self.run_vm()
@ -563,7 +562,7 @@ class VMInstaller(OptionlessCommand):
         while call('ping -q -c1 '+ssh_host, shell=True,
                    stdout=open('/dev/null', 'w')) != 0:
             time.sleep(5)
-        time.sleep(20)
+        time.sleep(self.EXTRA_SLEEP)
         print 'Trying to SSH into VM'
         check_call(('scp', t.name, ssh_host+':build-calibre'))
         check_call('ssh -t %s bash build-calibre'%ssh_host, shell=True)
@ -572,6 +571,7 @@ class KVMInstaller(VMInstaller):

     def run_vm(self):
         self.stop_vmware()
+        check_call('sudo modprobe kvm-intel', shell=True)
         self.__p = Popen(self.VM)


@ -599,24 +599,24 @@ class build_linux32(KVMInstaller):
         return _build_linux()


-class build_windows(KVMInstaller):
+class build_windows(VMInstaller):
     description = 'Build windows installer'
-    VM = '/vmware/bin/win_build'
+    VM = '/vmware/bin/xp_build'

     def run(self):
         installer = installer_name('exe')
-        self.start_vm('win_build', ('python setup.py develop',
+        self.start_vm('xp_build', ('python setup.py develop',
                       'python',
                       r'installer\\windows\\freeze.py'))
         if os.path.exists('build/py2exe'):
             shutil.rmtree('build/py2exe')
-        check_call(('scp', '-rp', 'win_build:build/%s/build/py2exe'%__appname__,
+        check_call(('scp', '-rp', 'xp_build:build/%s/build/py2exe'%__appname__,
                     'build'))
         if not os.path.exists('build/py2exe'):
             raise Exception('Failed to run py2exe')
         self.run_windows_install_jammer(installer)
         if not self.dont_shutdown:
-            Popen(('ssh', 'win_build', 'shutdown', '-s', '-t', '0'))
+            Popen(('ssh', 'xp_build', 'shutdown', '-s', '-t', '0'))
         return os.path.basename(installer)

     @classmethod
@ -633,7 +633,7 @@ class build_windows(KVMInstaller):

 class build_osx(VMInstaller):
     description = 'Build OS X app bundle'
-    VM = '/vmware/calibre_os_x/Mac OSX.vmx'
+    VM = '/vmware/bin/tiger_build'

     def get_build_script(self, subs):
         return (self.BUILD_SCRIPT%subs).replace('rm ', 'sudo rm ')
@ -642,15 +642,13 @@ class build_osx(VMInstaller):
         installer = installer_name('dmg')
         python = '/Library/Frameworks/Python.framework/Versions/Current/bin/python'
         self.start_vmware()
-        self.start_vm('osx_build', ('sudo %s setup.py develop'%python, python,
+        self.start_vm('tiger_build', ('sudo %s setup.py develop'%python, python,
                       'installer/osx/freeze.py'))
-        check_call(('scp', 'osx_build:build/calibre/dist/*.dmg', 'dist'))
+        check_call(('scp', 'tiger_build:build/calibre/dist/*.dmg', 'dist'))
         if not os.path.exists(installer):
             raise Exception('Failed to build installer '+installer)
         if not self.dont_shutdown:
-            Popen(('ssh', 'osx_build', 'sudo', '/sbin/shutdown', '-h', 'now'))
+            Popen(('ssh', 'tiger_build', 'sudo', '/sbin/shutdown', '-h', 'now'))
-        time.sleep(20)
-        self.stop_vmware()
         return os.path.basename(installer)

 class upload_installers(OptionlessCommand):
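The upload.py changes above rename the build guests (win_build becomes xp_build, osx_build becomes tiger_build), move the kvm-intel modprobe into KVMInstaller.run_vm, and replace the hard-coded 20 second pause with the new EXTRA_SLEEP class attribute. A hypothetical subclass showing how that knob can now be tuned per guest (the VM path is a placeholder):

class build_slow_guest(KVMInstaller):
    description = 'Example only: build inside a slower guest VM'
    VM = '/vmware/bin/slow_guest'  # placeholder path
    EXTRA_SLEEP = 30               # wait longer after the guest answers ping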