Main
parent
9692d1d509
commit
ec8ddd995a
|
@ -0,0 +1,4 @@
|
|||
python bad37.py --url https://www.blim.com/asset/10199 -c playready -s 7 -e 9
|
||||
python bad37.py --url https://www.blim.com/asset/24336 -c playready -s 7 -e 8
|
||||
python bad37.py --url https://www.blim.com/asset/24336 -c playready -s 7 -e 9
|
||||
pause
|
|
@ -0,0 +1,2 @@
|
|||
python bad37.py --url https://play.hbomax.com/page/urn:hbo:page:GYSAr8wiBN8IqpAEAAAAE:type:series --alang es-la en --slang en en-sdh es-la es-la-sdh --flang es-la-forced
|
||||
pause
|
|
@ -0,0 +1,542 @@
|
|||
##### One KEY per line. #####
|
||||
Sex and the City S01E01 - Sex and the City
|
||||
010077f9feb0ab3feaa14533fad3db02:4680777ac43b9698b7ab0ab7e8dc215d
|
||||
01051810dad40547af6417cd3923ad15:ffa4387c9044429d58c7173824e448e1
|
||||
0102a3562b2a4ca6f558e75d9e2e5a69:44742f93fa531725d1058900aa07cb5e
|
||||
01018b9daf7dbcacdd3ed46c0a200375:d0bd3272ad478ee9d7c2dccd68ad73fc
|
||||
MulherMaravilha 1984 (2020)
|
||||
01004200311580d98790817b5e4bba85:f36e16ff4a972e47404ca57ffa4419f7
|
||||
010518ff7f06ac32c4600a4c3d58f47e:8331bb0fa3c544b5d7c5bb7c34f5a589
|
||||
010191111658e773667a025a991f4d8a:68f8ffaa759b0a2f76ab9f298ec91a3a
|
||||
01039abe393e5a2c7fb219d2cb4709d7:2c4f6c7cf29e844145a4dd41fd612d64
|
||||
Game of Thrones S01E01 - Winter Is Coming
|
||||
0100e34e7b880c7b9dde35f77c4bc1cb:593bf887f900ebfd6fea887fe4500ce4
|
||||
01055f0f6ff8175cf7722cdfa2b74d7f:5da0b5ebf4fe112aa0e567cd42f9a164
|
||||
010274d4c60b0c31d6f886c1f4f3e9ac:96087cbd0f95970ce414bac741b66fcd
|
||||
0101c9735891941ecf7a306ca73727fe:f4a6f54a423f19e4954d5163f83acc48
|
||||
Game of Thrones S01E01 - Winter Is Coming
|
||||
0100e34e7b880c7b9dde35f77c4bc1cb:593bf887f900ebfd6fea887fe4500ce4
|
||||
01055f0f6ff8175cf7722cdfa2b74d7f:5da0b5ebf4fe112aa0e567cd42f9a164
|
||||
010274d4c60b0c31d6f886c1f4f3e9ac:96087cbd0f95970ce414bac741b66fcd
|
||||
0101c9735891941ecf7a306ca73727fe:f4a6f54a423f19e4954d5163f83acc48
|
||||
MulherMaravilha 1984 (2020)
|
||||
01004200311580d98790817b5e4bba85:f36e16ff4a972e47404ca57ffa4419f7
|
||||
010518ff7f06ac32c4600a4c3d58f47e:8331bb0fa3c544b5d7c5bb7c34f5a589
|
||||
010191111658e773667a025a991f4d8a:68f8ffaa759b0a2f76ab9f298ec91a3a
|
||||
01039abe393e5a2c7fb219d2cb4709d7:2c4f6c7cf29e844145a4dd41fd612d64
|
||||
MulherMaravilha 1984 (2020)
|
||||
01004200311580d98790817b5e4bba85:f36e16ff4a972e47404ca57ffa4419f7
|
||||
010518ff7f06ac32c4600a4c3d58f47e:8331bb0fa3c544b5d7c5bb7c34f5a589
|
||||
010191111658e773667a025a991f4d8a:68f8ffaa759b0a2f76ab9f298ec91a3a
|
||||
01039abe393e5a2c7fb219d2cb4709d7:2c4f6c7cf29e844145a4dd41fd612d64
|
||||
Fatale (2020)
|
||||
0100100f83ba50be7f20cbe8095b422c:55dd470924f512b407ce2672c6751d9a
|
||||
0105068f5057fe88847899662ecf452d:0a0bdb3381f733fca1ce94ef3be74a2f
|
||||
010173ef657a0ecf2db35888f3b890cf:144877fc1cc4029711691e3dc002d262
|
||||
Fatale (2020)
|
||||
0100100f83ba50be7f20cbe8095b422c:55dd470924f512b407ce2672c6751d9a
|
||||
0105068f5057fe88847899662ecf452d:0a0bdb3381f733fca1ce94ef3be74a2f
|
||||
010173ef657a0ecf2db35888f3b890cf:144877fc1cc4029711691e3dc002d262
|
||||
Young Rock S01E01 - Mantendo o Personagem
|
||||
0100ddf2bc8a779a1f09c368c7eca060:e3fb2ac8d1d1fa1e11e0218593f93450
|
||||
01056abbb53e5805462797aeca5568bf:35edf555765b230b1e9d7a9658d0933e
|
||||
0102c666058817aa543b63d1adfa904e:d1daa5eb5d51893aa6f9e9080800b136
|
||||
010193bbf7085e54cb9d573392abd865:9516e1c184cc999e64146e097b9bcab3
|
||||
Young Rock S01E02 - Na Estrada Novamente
|
||||
010054d60dcaeb448e0c5b3b49059a46:1f7c10dc5271f1a9c46c778c7657fbe9
|
||||
0105af2e162b824e925309c3260b5ada:0c17e76e4df2153bb39f004c28b1d5ad
|
||||
0102395832e1996319c8e03e3f63f0e7:261dc43e10427363a792e29ea72e47f2
|
||||
01015b51902146490c49e0faa2c8f5e1:fc53613bec2f69a2f4863665ff4b741e
|
||||
Young Rock S01E03 - Para a Frente Juntos
|
||||
01009d1877cfecb8779ef756af54f261:41ddba734a634ea37d9b892ae35aef3f
|
||||
01056412baea7fcd45997fdfc8771469:965d3890435d8c40e557ab5d8d49f27d
|
||||
01025e81393bb17ff3a3489fc50c7c46:053031f07db98f55187671c7cc16dfaf
|
||||
010115fd9ecf07c2e3a285dd33bca839:276eec8ce47e55586df5c781de9848e6
|
||||
Young Rock S01E04 - Cuide da Mente
|
||||
010007a3b2ebc6199cf73b41268aeb9a:5172f924aeb1877d90b6278349122a4c
|
||||
0105fc4abfd949f28fe016e9ae7d4705:771c1eccd020d940e2cb84075d242f8e
|
||||
01020d726386ce862f39738cdf54f6e0:5bcac6d1c1d5ae01573729c2f808cc42
|
||||
0101022b2d8f78a467289d744dd353b9:1ba2b246cda4daf91e22a0939f4a834c
|
||||
Young Rock S01E05 - Nao Parta Meu Coracao
|
||||
01008831029c6986c24f628cf885bdd4:a4d32c771e739fcb6d17e180d5c68080
|
||||
01058e6f1a11719c9aff718b10f72908:666b8a60b9651836c7d4787e9076e6cf
|
||||
01028536b69cd182d50a48750c1de6a8:6e63166da194d1136a4d70ed05606b41
|
||||
010161db8c5f7cefb1e577cc203165f5:3cbde197496a08230b8b5600cceb40ea
|
||||
Young Rock S01E06 - Meu Dia com Andre
|
||||
01006934cd9d4691f1dac14d1588efed:d89cfd9560fdb02dfee41b1aa5623bae
|
||||
0105681340b87f63875ed5afd9a0e458:ea4579a55ec782bf66e1727cb191ac86
|
||||
010240c33b9dd9053f77c44abc27b5d6:6238bc382509068fdc0833420cb34837
|
||||
01011e8893da248109987ac7c91b7b7d:eb07db5dde37d5653b6bf738f84c5a34
|
||||
Young Rock S01E07 - Johnson & Hopkins
|
||||
01001147f7ddd528c77cd170638d587d:42afcdaaf53b96d9dd1e4bd2b04b1e1e
|
||||
0105e3c0aec6474fca13d6802af9da30:462347dbfc08a64b4d92a8cff5f55e1f
|
||||
0102e7fb80d82e6cfbf2120b2d4eba31:2d7c93942bcc0b948046bbab3674473b
|
||||
0101b04ea21a8f9bfddb60d36d2b13b9:36a721f0dbd971913fe1b3d9125d5645
|
||||
Young Rock S01E08 - Meu Amor So Bebe do Bom
|
||||
01000a6d4324864de7e290de76c1c067:7f9f618043e3e6cfb03716366f1701ea
|
||||
01051631670ae66add688e3d06ecf1ed:19833d7a112bda41f308f830138386de
|
||||
0102dfb48a5c39b890a78273f4f38d89:7e7788797f6085409b73cba34db9beff
|
||||
0101f7462056c8453965361c27512236:ab27725060282d191763446df2bf735d
|
||||
Young Rock S01E09 - Uma Moca Chamada Star Search
|
||||
0100bec4cca40cd831dea8f8562543bf:cdb4be6be45890a2eaaed1b8b65d1fac
|
||||
01053e113d278ce468b8a73d68aba62a:134a0f7242f9b334c036c2c4e4a01e55
|
||||
01021b615ea10a68711529293255eeae:8f4358e37f160d291e51e8498ca0b3aa
|
||||
010121855d16b95c99be619cb4191a69:a23ebe421ef522f7fcef557989e17b7c
|
||||
Young Rock S01E10 - Bom X Otimo
|
||||
01003074cab5dba7a5d6f9941adadd22:56ca1b6eb1c12d806082d6e7926d26ea
|
||||
0105830951b04527ec8addf62adecc58:08fe9afc6df88a9589b5affeef461bf1
|
||||
0102d84a6e425c9d9043f9ec3132fdcf:3ecad14b703a0e79204dc046b2cffb4c
|
||||
0101cd0beb4e6daf0046655564d3758d:98f0b583564e05ab1d4567a1f836d248
|
||||
Young Rock S01E11 - Dia da Eleicao
|
||||
01000b3f384a6c956873de8c0c009660:b21b2586f7ae3c90945d776387c52e10
|
||||
010546ef4e8ffb90ec97cf5bd64645e0:4f440e174ef3bbc21e39cd2de0201563
|
||||
01026dcd7e1fe32bdf8444e1fa3eba59:d96172bb34c4b8b7ec95a52fc4978617
|
||||
0101b932547501a932fac39db1295730:640e397bcf3be27ee2b8cee05d7e3790
|
||||
Trem Infinito S01E01 - O Vagao Quadriculado
|
||||
0100a77dce692561487a5005f7575347:57b156f46b561b4def470eee03b13c5e
|
||||
0105547b931ba9da5d9ab6a8fdbfd889:3a6748fc91f05df721106b23d25e0329
|
||||
010202a90117e06472004dbc8a80103a:14c7e92589036df2dd44ca95e2519c60
|
||||
0101ec922d17a4ad84ca1bb7799f2ae5:9faf3568d138ce4432da19a09b6c86f1
|
||||
Trem Infinito S01E02 - O Vagao Da Praia
|
||||
0100f31da89cc5012fabe99622f791b6:e272cf736d9d15d5ef5920052dad7283
|
||||
0105bfc3f6d641726800af1885de4788:d2738c8b11bd0600cb7f09914d2b6ace
|
||||
0102bff9aa6d56cb536145488ce8f50f:73c53f333ef7b588953e5bcfae1320ae
|
||||
010117c7b84f8219057dcfd54a9a1225:bb6a7315adc4904daf7acbef2aa22e81
|
||||
Trem Infinito S01E03 - O Vagao Dos Corgis
|
||||
010017051096026c8b6f5392c21348fa:0ae10a3c856d925d0afe0cb2e695ad14
|
||||
0105e8680f697fa79cd9cdd11041fb54:577235ff95e21fcfe54943a82488b0f2
|
||||
01025dd8b75ebbb2cc7d8607d8f37391:17630cb656ae82646fb99df8a4ef1ebf
|
||||
01016a60ee89ff2797a08a492ddb701c:b25106fc8cd73268c7eeddf7fbd4983e
|
||||
Trem Infinito S01E04 - O Vagao De Cristal
|
||||
0100353052bae99ce777975a7ed8d154:50d3a6169f5c6bcf08771a805360dd83
|
||||
0105cf66d19e66efd2fc13179d826309:ff5d674d1af75240bfffe08b7a75e7dc
|
||||
010225fdbcac74d1cc0a00b735a5eaff:e03454aea5f613b846f4f6ce7b74c0ba
|
||||
0101c3c577164f0ff324ff45abe9d361:2c31beb7f600b2a92bedd789db906833
|
||||
Trem Infinito S01E05 - O Vagao Da Gata
|
||||
01008a2a43298ea495606c95ecbcf051:269c70959604d6732ab9015e673a6394
|
||||
010525204496216c89154119f1a296fa:cb9983c0480616b7cbbe935cb8d0e6e5
|
||||
0102deaa3e89a1e494f8337de127c30f:86feec4fe39392186d8b57c63d8fc57f
|
||||
01014b794bb65176fc3653ca830e74e6:3467d4bdb231cf3aaab4543e3b48e1f8
|
||||
Trem Infinito S01E06 - O Vagao Inacabado
|
||||
01000226c535f9b8b32102d7d385508a:fbaf4aba3cb5f83f5426fdc62cbbe333
|
||||
01056221bc8de2e19d3884aa8a910f11:9d20db6955d7eec5712f013e8d412e1f
|
||||
0102caa64ecb830002e7387bb2e62849:d5a5c77e2d547f3b0b993db3c079e899
|
||||
01016197bd7508424e470fc783490f25:5ceabff47db742c0f93d7946657f5787
|
||||
Trem Infinito S01E07 - O Vagao Cromado
|
||||
01005be6bdcd2f9854ae56e1a089d3ce:9ef241bd6b028b55e43da5690c92a34b
|
||||
010543c09f148951a86d4e6ed8af6ddd:fee9fd1c396e9e5d95c0394d308460d8
|
||||
01025b588cb1c2ca1ba60b398fc88b32:5d36ea74b93f1f56268176c4553653cc
|
||||
01016c002ff53ca43c3261086abbc1c1:645a42f7debd2c214ca7b7a49f7410bf
|
||||
Trem Infinito S01E08 - O Vagao Da Piscina De Bolinha
|
||||
01003b7bcc03d8b69e5baad3031c70ff:9228dd4a52fe088cc49659a09ce0d4b6
|
||||
01055eceb5e2d3710422ccc34c312e90:a4563d84c953d524ce3518ce30e73545
|
||||
010264903a38e874b827e568ec124651:104d1fbe42ae1ff2aa1d04c00da9c997
|
||||
01019456ff260170b4f015ea9da9d784:58a2deb06f144ed9811eabb7a653209d
|
||||
Trem Infinito S01E09 - O Vagao Do Passado
|
||||
0100db06533c68827d006f368f208736:2ec9b461f67ae4dc3c8c46cda7b20f4e
|
||||
0105fe417480ddbb404a2d95980880c6:06d1048a8d1b886d60adbae283d84cc2
|
||||
0102b6dc723d382dc21d1cd12a8ba60e:eed57ae5479ab01ba9c7e35d52a2814b
|
||||
0101d5defb3393e8abc0b0362e857946:1696c981048bec66da4b0d371b56b580
|
||||
Trem Infinito S01E10 - O Motor
|
||||
01006a7ac2b6dea3b0bb3974524e1f0a:1ce438bc6c63e3abccf414709a6f90f5
|
||||
0105e4d74df83d7bf31b294eb40ee8d5:b46f4c476d627b29ce811b45970ba158
|
||||
010259058d0738265a135144a4294975:86c5a4bdac322a1de44245f72e5f5676
|
||||
010141c5681e02b71a975a23fe6d7987:e18c666c0803946c2b048a6f86574e28
|
||||
Trem Infinito S02E01 - O Vagao Do Mercado Paralelo
|
||||
01002dfa703eeaa6cb5e5ae5093d94c7:c2b3954c3fc65692d89d961cb42bbf12
|
||||
0105a1165f4c0124fb76e121d2144638:2d42412c29fad222d366166298eb007f
|
||||
01029c9174ed36d7b6b72c389a3644a5:4d8769cf9e6a459701513d1945fb2e5b
|
||||
01011dcd22e68d659c2d23f276096fd2:21865df21586af74d729b0483aecb657
|
||||
Trem Infinito S02E02 - O Vagao Da Arvore Genealogica
|
||||
01002d58928076e7dea3900272147e79:0e39a701d95a378c9de92e7337463ebf
|
||||
010535472acdf450593a023515944584:865210e56cd0908059ea248a738112d6
|
||||
010262b79da0386cb678214505bb6873:d58bd63f5cf80cdd745abe185aea4b0d
|
||||
010102183d0dfbc01afa52892a0550f8:cdd3250250e140922c9299da953d5907
|
||||
Trem Infinito S02E03 - O Vagao Do Mapa
|
||||
01004aeaaeb8bce805287772a47edb12:efdb877fbe4db39c5dbb4a51654794ad
|
||||
0105a9b3efb90ea678a89aa75f34175f:78e807073c54fa54730d23d58c170bba
|
||||
0102c8f406595c39afe8299f09c1b12c:5f10994e8555257bd508dfe9f92ce5c9
|
||||
01011a42102c2e0c31a93ab911798c59:39e05b417eca455c93f070908948b3f0
|
||||
Trem Infinito S02E04 - O Vagao Do Sapo
|
||||
010021d75117232c1531b24c18c3122d:61c0b08791d579d6f097d149c1b0f98b
|
||||
0105c6fe22256997a0158b97e2edf65f:f572dc2f62f0b909c48fb35a5decdfd8
|
||||
0102bc953b2b207b48682c6a86b9cf69:2017e4aa3045f4e49917d73dea36040f
|
||||
0101e4542963aee71ba634c47e4e3ad2:d0ac221842b618756c19336a65c70bdb
|
||||
Trem Infinito S02E05 - O Vagao Do Parasita
|
||||
0100c5fa4542136889e6ced9d66e111a:4481fb0cb48a9d1b55d943c0616c21ba
|
||||
010501421ebf739079fba48102efd643:625fee92af72edbf0e8e30db0c647318
|
||||
010282fa32ba05153dd193d313257690:d7dc5686161142a65776d281db2ab489
|
||||
0101630bb5df9827a68c5f937120514e:96094bf48358db542bcfcb36fa49646a
|
||||
Trem Infinito S02E06 - O Vagao Da Gata Da Sorte
|
||||
0100777b4c0c4c60cdf58293db9ff60f:aca59bdbaeb29a3a9e026cbe91de5d66
|
||||
01053823d2474187ce998a4649b98c6a:9b3c97891ec0562429a408525e908e3f
|
||||
010283f3d55bdcdb99a066c62187f93b:be20d5d05bda12aefc7ad462db603098
|
||||
0101f39695f770603e7cd7233e718fff:305073711a17feccc2c3e2675fdead2b
|
||||
Trem Infinito S02E07 - O Vagao Do Shopping
|
||||
0100eebb18bda31f550616a663d70411:9a88c41b1e6154e419c71db608a04e53
|
||||
010565dc99b5a55c94af10a2f658d966:db26db5151d6c72c514038f37593503d
|
||||
0102f0656816f9d96ca7d9cf561b7720:8389365e462ab1e76bbac4349b57324f
|
||||
010186d41936f14c983a1a5781fbfe4e:14cc38f2c63f8cc104cfa64bd887deae
|
||||
Trem Infinito S02E08 - O Terreno Baldio
|
||||
01002cbbb07c5f0a59bb03fb1b5f2634:f2ec5c57801b81805a949b943e9f899e
|
||||
01056718328b80acc408b3910e4c632d:79ffc596e2f2c33add5dff53f57c7a8d
|
||||
01026c0d04247377689ec90fc46ba9cd:c307b0503666cc2b7e6374d90df9fd92
|
||||
0101896f0d5c7e8bf8240b9fca4aee8b:fa2d0fe28aadaea36774389d95a47e1e
|
||||
Trem Infinito S02E09 - O Vagao Da Fita
|
||||
0100ee249145422c84529aa22c15a851:789cb1df47813973702d683a0ceef6eb
|
||||
0105f6ba2fbe04c0dfc5e679b8fa9ce7:0727567596ee8c592e620c6404bf1bbf
|
||||
0102ea29eee03937b5a41acb01ed5f85:1a4ec821f2c6498ef9c2825eff5b01a2
|
||||
010184aa9fe7bb861e9db755d1a43dea:a0822e2bd15a90f6b7bdbdd18777ddd4
|
||||
Trem Infinito S02E10 - O Vagao Do Numero
|
||||
01008dcd958a85c59a205f8447b27b51:4551fc782cb6fdc2b99786c157c8ec0d
|
||||
01056532383cc4e8ffa067475a777c31:3b233156f07b72c99b2df3be3a96ac38
|
||||
010248b15295de961488e53707f8032e:42cf3ac0207b9bfff3f604a55411d6ac
|
||||
01017a526e4dbeef40ef0d0e68ba29db:4deae4f4aa8300ce26e7f1cda2c016f6
|
||||
Fahrenheit 451 (2018)
|
||||
0100189566132b2da3d324d04e84d757:1f38f71f9edbe5d0242c6205d64dd4c8
|
||||
0105067bd0dc8bd67598b206e0ac78c6:27a97050e2c22de05d61791d6d022943
|
||||
010277d957a83acb82845914bd603bf5:28aa523197bb9c44c989bd285d2b623e
|
||||
0101c3a540d39664f59cc44006be6fab:8fe6dcdbb11158344f66acccf178892b
|
||||
Roswell New Mexico S01E01 - Pilot
|
||||
010009f9459677f87e91d91dfec31221:7a6a0b3f555dbfbb14690454ae1d6dda
|
||||
0105a7bdb64bc304a4c5c0e4220c1c85:b3bf946609894e3143164b4f29a3ac2c
|
||||
0102c99421c06470517caa1b2d8500b8:8841f7ec433dc436cb83b1cabbf94cf4
|
||||
010105371ec1c248d4f7347a92327330:5d3b50f6537f990e4ad15064fb042989
|
||||
Roswell New Mexico S01E02 - So Much For The Afterglow
|
||||
0100c202620f26048584caa2790c67f6:99231c27dbc6958cbce31243de701a51
|
||||
0105091941943a3576a829da07d60f54:eac89d7b0edc8096fbf70f73999b4600
|
||||
010295b284ed83de4b5db7d7e767808d:f2094c7f0aa86b30206e4731be4b4032
|
||||
0101c6d44427b80c5feb165286a2f384:7fc62481b1e0977b64824c5aa826cd7b
|
||||
Roswell New Mexico S01E03 - Tearin Up My Heart
|
||||
0100732c14dcac70132458695a826dc7:61a36d4fcfd520143264f00853e9d066
|
||||
01050520af2782e19014630b1af9dfd7:4e824715a431bf47a0dfcd0f8413cde0
|
||||
01024fbdbdc97ffe5b38635540abfaa3:c800360caca6e40bd5ab4b4bf6aa2251
|
||||
0101d5c7a1d21b0f94d853a80b47ce19:b16dba1bd2df03cef6677b3445eafaac
|
||||
Roswell New Mexico S01E04 - Where Have All The Cowboys Gone
|
||||
0100cee62e1961f807c4795c926657a0:f0ad552ea5081fe544e21ee39c895d7d
|
||||
010510a3efc687d07301794678dd3b8a:f719c0f1f1d6728f15f82b710e8dd188
|
||||
01029279f9c0c0f142c8b3d8129c66d8:b33c26bd3c5fa2c50c513dfe9a42a852
|
||||
01015f70674d18165d8244f996123849:67936765750aa5b0855611de7d360e9c
|
||||
Roswell New Mexico S01E05 - Dont Speak
|
||||
01009721246d2a52b6160b6f9e42fdb4:28aca89cafc55bd9ee739069b6a4b1d2
|
||||
01053286f3c8c35291605cdc2710e153:713c08c3b78bbc39dac51c0dae89a5b9
|
||||
0102d51ff0130b48b9e065e00db828dd:0872228085548c2f9a293d5ff47f6c73
|
||||
0101a1c965f2662ebc1f399a7bb5b333:c7bd6b01746b9579f6a552a8e5dd4f8a
|
||||
Batwoman S01E01 - Pilot
|
||||
0100390773530bf551cfae7f6c8ee52d:186ea907c5c0de6038cc742c9fcd3a44
|
||||
01052bbc9724dc441ce3d984bf56432a:47755fddfe9f998aa284bc888a15dd3c
|
||||
0102ccf2dae2f4d80f8816fc3beff750:132f2f07be39786b01f4df0fe69863bc
|
||||
010127ffab8a64c8ffb3960388dbb505:894aab24f219a9abcd0911c912aeedd3
|
||||
Batwoman S01E02 - The Rabbit Hole
|
||||
0100804fbc7b6f49261c9a508d7baef8:d8026406cc8cb742ba811a10dc14435d
|
||||
0105cfb4c89bb5e75c0d1572dd5f4aab:1219ec71cbdc695cbaf7ea6192bc6c5b
|
||||
0102738070d60901250e0c4f534597a8:a8bbf3dde2a424b7a495c37868bf0d82
|
||||
0101f11b648c20a22985fdfb237184da:987e687746d81dc8f1e8fd533976c237
|
||||
Batwoman S01E03 - Down Down Down
|
||||
01001ef87f6933c2526d88da2066ceea:ff92bf6e210dac502a36f69f561bc339
|
||||
01051581acf80d10802b57e01c13334a:0e54706121b12aeaf7da634befbfc20f
|
||||
0102002b3307ed4720978bfa95217c59:bdd60deb40bc130141d24693b7275f7b
|
||||
0101977f3a31d0a594728b47637d994b:4545d72c7fd25d725f8c804925dea370
|
||||
Batwoman S01E04 - Who Are You
|
||||
0100249e976bca0d47f4355599227440:eacd7c594dd1e14439b4a2a316b9b3b8
|
||||
010532d3833836b5c8c22fdc92aa744b:d4ac953baff4587470b97e1db1b477e3
|
||||
01024b61a179615d2a98cd272f9fc7f3:333a5197f1ccc33d277e89d90919515a
|
||||
01013e95d9feaa9e3d57e0a1339e003e:f9c89721192c53b48a13ebbe953c8943
|
||||
Batwoman S01E05 - Mine Is a Long and Sad Tale
|
||||
0100a0e4aebe8d034cbbf28f2a8179f9:0fb674861e64570ebc6eec9ce32280cf
|
||||
0105f44247ffa9b64002eff95af28c73:53bf76187a190337ad17767a4e698db2
|
||||
0102bcb91c6cff38cab1e465d9630ccb:2354d918be9e715906f12dad6903123b
|
||||
01017a016043b17c55fa43b2c5c18f3e:2968f774c80e64a3c13bea2056e75f51
|
||||
Los Nobles Quando os Ricos Quebram a Cara (2013)
|
||||
01000de616bcbccf4eb5600fb90ac44e:5d950c00ddc4c20b3e00d95e0e3a5079
|
||||
010583bbff43327262f4f2eb6e833c64:8ddf8340464b7a21c38383f4bf11ab74
|
||||
01023ec68c3630fd49a3dfe4603f9b45:f9da1c37e95e0977fcbb3b050d91cb1b
|
||||
01016ba7aab5f1f5e7dbefb6bbc5f50f:2eadf2c51e1fd012fab77c6ade66ea02
|
||||
Academia de Vampiros O Beijo das Sombras (2014)
|
||||
0100bbde78dacd6a020c71194a37ab69:28b79e31b93ffb2169b2df43b7273936
|
||||
01058880ae67aded89986d9b3fd791b8:4c290e959d61881e870d43be24114ff6
|
||||
01027cfd8a17e5ce73838197c9ade68d:11216a23f9eeb2656552a46a5f075830
|
||||
01018037f262cb1a342786b186debfe8:010353bfefc7f4137c52ae145c2a210a
|
||||
Constantine (2005)
|
||||
0100a28fc8e0342fa86e9069c71d38cf:c56618c167dab5c70394266af0fe1620
|
||||
0105cbc8989234e5aaff9d526152b0a1:bda3bb5e4f5258198dc81335d5f9cda2
|
||||
01019ad2190d5dc33d03abac524a18cd:7645205622d965e678cc215b7e95f89d
|
||||
V De Vinganca (2006)
|
||||
010096cb1ee693ccb48bd8bc490e9751:b96c4609105c2cef2f905a41906f1877
|
||||
0105520057217819b40d1936bc713ebf:76587f94092cdbde301ba0ebd81d5848
|
||||
01010666fe412705fa771636f89c0fc8:2dafcf73e2c62eeacd0f6df443d40327
|
||||
Roswell New Mexico S01E01 - Pilot
|
||||
010009f9459677f87e91d91dfec31221:7a6a0b3f555dbfbb14690454ae1d6dda
|
||||
0105a7bdb64bc304a4c5c0e4220c1c85:b3bf946609894e3143164b4f29a3ac2c
|
||||
0102c99421c06470517caa1b2d8500b8:8841f7ec433dc436cb83b1cabbf94cf4
|
||||
010105371ec1c248d4f7347a92327330:5d3b50f6537f990e4ad15064fb042989
|
||||
Roswell New Mexico S01E06 - Smells Like Teen Spirit
|
||||
010034443f4e7bc81b5a406b3a4b5163:926f97e2d93584efa27041fec0f549d1
|
||||
0105a746eaf16eaab8df5c4308a5d63b:0412fa441d6f1cd44ca92ea014f07a11
|
||||
0102323a46e8b950b01b534a43a3fbc5:fc8d9449d0fa3bb19a952443f6ce70fd
|
||||
0101379f3ee6ceffbf11323803a81176:cca7ead90ee511ef0e38c2b59017804a
|
||||
Roswell New Mexico S01E07 - I Saw The Sign
|
||||
01007d4a6ea40d8c5be14b4cb37fa60a:94e005d1b0f02254f18cc8b3f6dcdb69
|
||||
0105b60a05f571c44553320aecbbae19:59ec4a87cba2fc2df042486c4e1501c9
|
||||
01020524d0e5a38ef18932e46d365969:eb71d5e0d4ce5667bc795e8e819e1330
|
||||
0101b918f70f4b6faa9f2f398d22f00d:e6e8bead456741b747e720bd57b9d6da
|
||||
Roswell New Mexico S01E08 - Barely Breathing
|
||||
0100782e5f39937f061d233438013591:9a65b3b177ba184180068511fa234174
|
||||
010564ea37554c7432227b31d2b8e405:04ef7d92f2c7c0fa3020f5763827e2b7
|
||||
0102002f74ffa263d7c9fc24e9774896:5fa1647d6dab73d65508b9bca11726be
|
||||
010161303ce5b78c279f02846f2f4805:eeccb06b757604d50ad7c7c7fa58b4f8
|
||||
Roswell New Mexico S01E09 - Songs About Texas
|
||||
0100b9738d03173c982180911b0b8950:387282e8cccd4955baa5d28166e84065
|
||||
0105612a51be75d205e5081c26a31898:77134361436e3d6759e856d249d6c173
|
||||
0102664f8ef4015db568a7dade8b9957:3b934aafcb07bc45ba1c6539f43e80fd
|
||||
010103e49729c908b66086f1482ed1d2:1b72c8fda103607a20e8bf033588bbc8
|
||||
Roswell New Mexico S01E10 - I Dont Want To Miss A Thing
|
||||
0100a2e059ca2ad0c9e97b3434ecaba0:aaa4a9285197d54b3889eea9bec4eb39
|
||||
01055c61e9b0afe4c4acb6e57feef77b:f8468d15eb82088ad0a927eaf19b1f2c
|
||||
0102e5094f19e36fbdf79ebdd064b905:0e3b9badc03ec540c63e894c46cbc7ab
|
||||
01019f2d0cae2be25cc2e7aa3a6b7c30:b5e0b5d687677a8706cd25ddcdab8840
|
||||
Roswell New Mexico S01E11 - Champagne Supernova
|
||||
01005b94358a875f8d6617779bec40ac:ad37e726fac17480cb4a85a34046c752
|
||||
010523fb3e0e2bce86427b3a39738f96:4a49ad96a833ddb32d6ce863ea5e3300
|
||||
01029b07ecbcff2ffdd601a7f21e0594:5e020da6eb0b3787292e4fca71b638da
|
||||
01011dbaddf66721ec26e83de8fc5a9a:dceebce554e274dd2f8f3db354e37997
|
||||
Roswell New Mexico S01E12 - Creep
|
||||
0100cac8353d3c6787e054c2b488f2af:ba47001f333895161ecb15710228ec48
|
||||
0105800191f10bf4804198709e825e78:9992fb4023ac7c3232fea229eadb7247
|
||||
01029c8b7bbccc661a00965e5c3f9825:3bf0049865751ea48ade9b3d12512f2e
|
||||
0101516d7b235ddcd0f6f09df2c9386f:fd7e04d4a74540213b98a072c671a4c1
|
||||
Roswell New Mexico S01E13 - Recovering the Satellites
|
||||
0100f2351021ae7dd608b4cb49d89174:c8de71653719fb128a11f55384bcfc55
|
||||
010593faace129e34978202ed76f40a2:056130c72c5cbfa8861d6429c8e9ad3b
|
||||
010245a904a8da7ae2e0dcaca3eada0e:867df482bc59b05edc184d7dc8230e64
|
||||
0101a5cfc82dee23ccb5f755690d02e7:37b8160904d675fed201ce6f28d9e58a
|
||||
Roswell New Mexico S02E01 - Stay I Missed You
|
||||
010055e93ba1d37f640ff15831420cb4:f1ffba9635c9ba8736e74d11664f3861
|
||||
010583d603a8ecc72c4ae5cd0b8df6d3:e8c9da174ea524f1d21e79170064c23c
|
||||
010282ca77c5c6e9706850c5f9c4a935:33f5f604a36af1340a6c37ae0039c39a
|
||||
0101042ea1a5c2ff8a01b865d468e29e:3a17fb9ef920f2dcb97c55791ba2ff54
|
||||
Roswell New Mexico S02E02 - Ladies and Gentlemen We Are Floating in Space
|
||||
010000ac254ee1626d6784b34792b67e:5adecbefeb06305e2159d0227519b875
|
||||
0105bfa934bbf4977bce5ebb3e06e06c:1c693045499ace70660465480483f8a6
|
||||
010253d663e0b7bf99a9aa36c9618ab5:342709a8d38f5d465261b629df184aef
|
||||
0101686551fb96dc2f3d85ef609c53c5:1edec050dfceaa348ad8b1086a5a8f18
|
||||
Roswell New Mexico S02E03 - Good Mother
|
||||
01001d6f712eeae283179ab38e5603ab:783250752649649564daa266fed62211
|
||||
0105402438b79202cbb0684f3927c5b1:46be973eb922160f7bc3dffd58e7ffb7
|
||||
01023fe7621a1cd99b3630f07d98e187:ea65b608358103e9488fa1d031ad5319
|
||||
0101802d9b5715dd523dd73fea64292c:5043808db6b3dc251c7b43696a93065a
|
||||
Roswell New Mexico S02E04 - What If God Was One Of Us
|
||||
01009751d7168cde6f9e3dc3cadafe23:32f5a338462392131a89e163150e96ad
|
||||
0105832d4f2d2d08211abeb798aa60a7:d287ef2a28616a9e03b176e8e5c9ffcd
|
||||
01023650f1ea9b8505d7b25ff9ddd9ba:fa2ec0df5ea493e982b024e839a14d2f
|
||||
0101a58a6c0cc0fe1d8b9eb79c75c84d:034775c84ef646caf593547bbd027160
|
||||
Roswell New Mexico S02E05 - Ill Stand By You
|
||||
01003c170610acdc145705925c75dc8b:4e03ed142b3d3afd6ede3e8222469de0
|
||||
01053fa90075e56507d2f08d07418453:43a859f3a0c609909e3ef97836f6a089
|
||||
010247764dfd8b01bd94d35996268f2e:cef69b6a979225da5db5a6a9b004fa1c
|
||||
01018a1da07eb10957df4f08badb212e:5f61c40d86ca117d1c000779c5fce26b
|
||||
Roswell New Mexico S02E06 - Sex and Candy
|
||||
0100c9859aa41447c281fbee144eb3ca:40b2ad310838bd37717d1dc0d6d2eca3
|
||||
010525ba85b6a8424e336f0be886c611:82f7e7b28558bed9fc40f9c8511a1e10
|
||||
0102d639e7b1f52df71d4f65825a7de6:8c20b5ff3ed796731ee921cd97542f08
|
||||
010182bce7a49e69c13b86cf9150d5ea:584a366522669f1d8e7659b6d6029fd3
|
||||
Roswell New Mexico S02E07 - Como La Flor
|
||||
01001b413d2ccb50474f0d880745f3c4:d292e5f949d34d90fdb945ea00740098
|
||||
010529a8174856d7cad1a986209bb5c7:65b41e85079359df44d5d6ff718fddd4
|
||||
0102618670e1aa05e4c9c46b0d5f3fd9:43a21bcbd6f5eec2671c1d0fbadadeb5
|
||||
010157d4d2eb11cb63f7213e34f063ed:a5c9fe3cc669f3f5859101c2f1bf3307
|
||||
Roswell New Mexico S02E08 - Say It Aint So
|
||||
0100bedd87f02e11a747c43ef356b72e:2503e4ab728e5cde26857e76d00365df
|
||||
0105d127239b734a806f233384c6c9df:8f85088049d0f6bfa1f5198b37366136
|
||||
01027b32a815863e1e6fa75716275f9d:6a3c68b3128041e1ac1ebb755421c8d3
|
||||
010123fb1757ec91b45ed56957113074:6e5ced55be74e7e033d88e4a3a7534c6
|
||||
Roswell New Mexico S02E09 - The Diner
|
||||
0100e1f297458d05a76eaed3cf707eb7:1ca521fa6dfb6fe1cb60f0747a37affe
|
||||
0105ff7a4f71564cbc06f14dc1eb8449:e5a065a6c12461e2b07a6e9203946d60
|
||||
01027f165b3eea634c9133e0c9e0b393:277b885c14ea631fd257a9a411f2d66b
|
||||
0101ad07886f6daa9b75e5529f2959bc:1857d8bcbeebc47fc205c8b93d08a404
|
||||
Roswell New Mexico S02E10 - American Woman
|
||||
0100d36028910f5f9b24a643e1698fcd:1088bd62e153f23d8043d24a3509380a
|
||||
0105c9dc1361ab8600255fba8af57296:68be26b8fc07a78e63a7d3debf810c64
|
||||
0102427c557dfcc23459d547a8a44d73:65c3e1667bb8508319e42af3851e6684
|
||||
0101b4daf13d1e085154921bc5337d59:ecfb22dbd453fe88b10d2eb55a170e6c
|
||||
Roswell New Mexico S02E11 - Linger
|
||||
010016090cfe549f0643884b39e3eee5:8f9ae671bf9386e77acbe911280cfc23
|
||||
01056e0953089b990bc39acca0232bcf:c3d86b7ca874e710444dec12d3cc5859
|
||||
0102bbe47be3edb9b89e21817f31a3a9:80e294ade70dfacf27276ea5dfdd902f
|
||||
0101dd3d866e9543a4f8f85db766310b:c385ede674dc33e30ec640745ec9f843
|
||||
Roswell New Mexico S02E12 - Crash Into Me
|
||||
0100012a101dc5708561a136b086d145:7ce8957327b017415fa40673deb088ee
|
||||
01052ec32bc339314664e7b99ed2f5be:1bb4e43ccda18143ade33be872b4d311
|
||||
010251ac1b67c50cb4d7c95e083a09f7:2a893ffba30e18bb9327eb2977a1d3dc
|
||||
01018cc4c476b5aa25290c03480933bc:372ede34c3519699f338c27a7088b19a
|
||||
Roswell New Mexico S02E13 - Mr. Jones
|
||||
0100e12f3fe7b6a545324f33d951caed:1afb8cb18d7874b3acece3c6c07df860
|
||||
0105d249d96c6fab93d1c3e313effcf0:459830b4933df00de270b92c5a883bdc
|
||||
01024b21ea6e6ea711ace86d1a55e4f1:97977e8d89f138055aef5ccf86d09bf6
|
||||
01018e618a3607c17602f97fe730b053:13f6347051f503712b89bf4737f73532
|
||||
Batwoman S01E06 - Ill Be Judge Ill Be Jury
|
||||
0100035d436fd3a38a4c67f52573795a:1070f9f940999f6668bf84ecd35bc25c
|
||||
01051df4f69aa25f62357fa6ab29998a:564637292b4cb57a8ead1b82cb477962
|
||||
01021af52e84005c77165fa8f5d5dba2:1d20fa6ac6811bd928a63e7c61fd532f
|
||||
01016d45285579f0b6e18f99c3cfe39c:32e8db3b1cf4c6e055efd19c089d7c1d
|
||||
Batwoman S01E07 - Tell Me the Truth
|
||||
0100d25d1d3f643e1375ba89566b6278:5d5fac6df13d4971d02df9abcefe7643
|
||||
0105e8ed57956430dbdf6c1e81cc294b:6ec6a00938e5b5e636bbb3539cb272ce
|
||||
01027f276f8031e04ab0b4ec6428bb41:853f4d05533edbd3cd68a5e5895e2919
|
||||
01013a9c3e68630d14e4229f8390e780:41fc467d248521addd83c91f53694253
|
||||
Batwoman S01E08 - A Mad TeaParty
|
||||
01009c66f4d085d088a2ab296a77e236:69d15a1be1d1af6ed8b59f9e9b070d2d
|
||||
0105c96898605d53d193e3ccea19eb2c:ae7e989db4d7d3557b7b809faab52e47
|
||||
01029dfd6f8068ca2d5b9e59ef221d45:5f9c617ccc9b998c8286c8ce1301c84a
|
||||
010134898d8d9e2ac6628fde4539e65a:3ffac8954cf5ceadfcb3b876eac90d6d
|
||||
Batwoman S01E09 - Crisis on Infinite Earths Part Two
|
||||
01004886cd5197e5cc90675b5d28b9fe:46ded550117402ce124db576452bb76c
|
||||
01058f4e3f078e386b61f565a967b07a:44b226f2f5b8d473b9ff73674e1d3f57
|
||||
01029ab729da9b7830eadc016b508f13:293dd873bc8bae539982c502a6ba7ce7
|
||||
01011c4340d1e63b831278f3a4d2bba0:078c67fdee03973ec6a990dbcf7cc0fb
|
||||
Batwoman S01E10 - How Queer Everything Is Today
|
||||
01000a64cb11f470025f7861dfea245a:6d200458ea6e5c76d745cd5f0e829362
|
||||
01055d2a5362c6782c7ffd323486a09a:1e9b76a5767be9a3e0f2c80a40cfea20
|
||||
0102d86a5d895f8f2ba83d37b79a2091:1782f785676eeecacff742ed2b7f208b
|
||||
01016c2da7936118823143f0326ecd02:fd262e5c19d05ee5b23b4e82069dfd4e
|
||||
Batwoman S01E11 - An UnBirthday Present
|
||||
01002ab23a6dc8570cee971720a9120d:22a4d9f94995210a6c91c200f042882c
|
||||
010578fadc01321fc91ed35208ca320b:db1d7713b323aea2ebd251679a82f42a
|
||||
010257259922aaa29c1a1b716ac3b639:213f968b7422156f264ee58ff7ece5a7
|
||||
010180a020a440a1bfd411119a29b287:f2d98d47a554dc590372c21c6104b0fa
|
||||
Batwoman S01E12 - Take Your Choice
|
||||
01000e9a1e5204c71cc677de16fd933f:cdb3b3ba7db9084fb878d90821d30f64
|
||||
01053f10dcb1eaad286c6532b422ddf8:57a5a7fd74fe851b20a4050f179bae67
|
||||
01025d50b1578f95e994b19582efd860:202007c453cae0148fa14ac0095b3af6
|
||||
01015d239cb82d487d3e5636bbfad079:629438e17c8671607fee73e8ede4ab22
|
||||
Batwoman S01E13 - Drink Me
|
||||
0100884863936f6672128402a9acd8c5:8a453eeb7af6d8d51a8b7068ccfe6641
|
||||
010581ee86847d2e3c818d47fe1b30fe:7c052539028c5b6a74ceb06aa1f8ebb4
|
||||
01025805daff8af753dd39dc68ec1815:a1f0b35fbc5719243274d6e706e176f6
|
||||
0101b8adc416982e92eacee5a4b18f1f:37ef15b8d35efd9228905a7070bad093
|
||||
Batwoman S01E14 - Grinning From Ear to Ear
|
||||
010008d0e901958c2082ff2ce6088099:86b5d84862abe0a0a3f572c5c46cc5a2
|
||||
01057f104e7198972975ba84b6b541b8:ddbfa02956875473a716475c571dae5d
|
||||
010201e7e0ea9ed068fb3e0453184e35:37e23fb6384249d0697aae659ef0ed90
|
||||
0101cbf0440eb922dcf5fcccf0570767:4d47c2da04240526c5b25a196626c5c3
|
||||
Batwoman S01E15 - Off With Her Head
|
||||
0100eea2f5e0a428730c95806f7249cd:e2e6db528dafac89b3e7962fab63908b
|
||||
010535f25a3256ab232c5ddbeb81f012:41c2cb9451c329001c0a18b80c43d298
|
||||
010229edd2c096e619fa7f927caf97cf:ae3653fa959da30955ea6f8c83c403cc
|
||||
0101e4f72b01eb1cb4715784f07ceece:2a76eb4a2c4236dcc98837f04066842a
|
||||
Batwoman S01E16 - Through the LookingGlass
|
||||
010027922226ebb00a92105c085f4c4d:5c05d0608c14e86b88da68892b427706
|
||||
0105544adf7cf66b8ad8e9fe8a702047:1726c1ae555e855b157cf0157881f499
|
||||
0102719b3c51b0326d5c30f5a8946416:ec9b8f2d4eebc2673a496cad0afd5439
|
||||
0101d6eb53f4e0347945fba8c9130d2d:a78547c8c6cb47f035cee5d56e00be63
|
||||
Batwoman S01E17 - A Narrow Escape
|
||||
0100278fd6667cec4d1a631cdf796384:948d90a2e4dde44c954485f5b5ea80dc
|
||||
0105bd0d61743eba7c3cf5b04fb7bcdc:2a8b0e496aa7ad94a3a4d1bdc98ab35f
|
||||
0102830aad98fd995bbb9681f3a1f7d7:a8469b7875bd80a1ee7aef95279ede4b
|
||||
0101d894d56bca027d036d1164af4126:062fb193e464618a7d7e6a4bd7080d83
|
||||
Batwoman S01E18 - If You Believe In Me Ill Believe In You
|
||||
0100e7d1cc7d2f4fbc326ab64732c614:5dccf502aaa33bc9207e686df509f9ba
|
||||
01053d5a7f6bcca4ec10ea3a7b98e10f:ef28d2f6c0e14331fe618ce0e800655b
|
||||
0102fa83cf3a4825ba712cce81bacc41:5920b927611a8ed73ac62c7d7e5ba62a
|
||||
0101c622ab93565ca301835ec0f2511b:7b0b5c4004620f45ab0f042763696e3c
|
||||
Batwoman S01E19 - A Secret Kept From All the Rest
|
||||
01009499add899323e56c93a3b6cacf4:b1adf953667c7f87722ef95002b3a7e7
|
||||
0105295e8784d92b43eba52bebce3925:2d6582ec207ef31d1449ac56eaba83bb
|
||||
01029b257bd6d060d1f3a3e3de3aefe6:be4e0463eaa8ca6422b05290d77b6e53
|
||||
0101ea890c84392a4e54245cedab0c48:7ea76ac53dd68d01687e25c3f995659c
|
||||
Batwoman S01E20 - O Mouse
|
||||
01007f429a8a95178d08da9f0c2dee9b:5bf08eeb0fd96f063090f1ea1e2cd456
|
||||
0105eb376ad767a419ce1654a0a49e25:e196f8a0561331d57c6401877e10bcfd
|
||||
0102e2eef8c3d3de5ba578df060159f3:bd0147fd33459dde65e0802a21271be1
|
||||
0101215a2e0e22edb5c1cdbedbcfde23:28e09a6d987a52ec5df8dd3bbfd786f3
|
||||
Batwoman S02E01 - Whatever Happened to Kate Kane
|
||||
0100195fc40e55337ebe9e2c5c9f6b18:fdaecc6a251271b6421bed72c78b7d55
|
||||
0105c89526a76a69a36d8ec66a1184bd:a62bc8fedf13935496e284f5778a497d
|
||||
0102f7735eef546b3ba225515caa1ed3:1a4f9e4054189570cbf99aa7762e9f4b
|
||||
0101195a7385e588dd2455e36e8a5e3f:eab66efb9557ac3fdd85df45f437f989
|
||||
Batwoman S02E02 - Prior Criminal History
|
||||
0100b4d9d53f3633b9cc9e7a21fbc556:ef5851feffe14998c2cc4bd0e0bc055d
|
||||
010549a2945d735bba3dd43274939b8e:9872e4151ff3f95ece2a2067f5bb1caf
|
||||
010275c53a546b20bf7647a73fe98494:37196cd33af243279ef2722b38b3f26c
|
||||
0101b7694404a788549527493393762f:2f946a6f792be8529dd5f1db221937a6
|
||||
Batwoman S02E03 - Bat Girl Magic
|
||||
0100a5706a179fee0f576bbc429c269f:8144bcc919ae18b34ae2e2177912a11b
|
||||
01050d66178688aa7b254603127889ca:551a411ead678cfb7ffe7197a31aadb0
|
||||
01027d7a7dd4464bca3b4b4229a8a848:38e455dff11b4a340c25966ef8e94b59
|
||||
0101c978bb80268c723564c703c48eb2:af4a79ebf0b2fe7a8fa1d5ef4ed1b841
|
||||
Batwoman S02E04 - Fair Skin Blue Eyes
|
||||
0100c9d92ae6eb9a8d054f5c64dbc7f3:0897f739796e0b3d8a5098a58804763b
|
||||
0105a6d106a9caac09c7990cbe6922d4:ae8ed94d6a0dbfc2d87907fcbd208988
|
||||
01027727be2becb3801bb23ca5f82030:cd169d2316ae7c8cc1b5ace5105802f7
|
||||
0101c7e2f00526422737ecaec7cfbfd1:068ccaa9ba41121bdf9a671a6f6b76a2
|
||||
Batwoman S02E05 - Gore on Canvas
|
||||
0100785a4a08f4670ea51d3a27e58cf9:769874aab85443e8f3f748d5df91a18f
|
||||
01050c76675bb6ce29da708944bf6784:d75a72be1c65efc811473f5c5e2bdfb8
|
||||
010239a841c096e7bcdda365dacbb7b2:48445f8c87883bfcfef0fc36ae6534ab
|
||||
010185d38aa8b1f82a91a0a23ffff890:3e914a141727f9711a1c7525a9b24552
|
||||
Batwoman S02E06 - Do Not Resuscitate
|
||||
010067302a677a674b9be4be621102f0:ffd8ad6826e7f6d61850945a92d43632
|
||||
010566dee26f7208094d4733918fde49:72fcab9cfe718de35f1974a4a70c34cc
|
||||
010249de755d9eeb5339dc587e366ff3:b46bf47aea22e546a3723fd6eed3c99b
|
||||
0101f27f0727a7c31b71008c7675b7aa:0671dd811f889beaa942a56df1b1b406
|
||||
Batwoman S02E07 - Its Best You Stop Digging
|
||||
01001ccca8ef2d5dbdb860c8f1ce5c61:75e69bda9b37058dcbda7da681c89c1f
|
||||
0105b856318d8610a2e93852c4a06c2b:dfa1886f3e7806547f220626879ff261
|
||||
01027c26e9750fef7aa25d3210872d64:2f3870c689616032d949c9db19985d51
|
||||
0101cd39afdcbcc06956ab9ae342ac63:2ae5e2a56f53660c4a54b553c5b2d6a2
|
||||
Batwoman S02E08 - Survived Much Worse
|
||||
010038429c900119a5837907df39e84c:56a45e9a47b31325f99f11278c59cf50
|
||||
010531e02e2f828a4e878ee57fe1aa36:ba75877b0c84eab324d9728853d7cf35
|
||||
0102dd204092dc81083e0ae685fbbbcb:b1ee9175b3e1f368c3900e3ff529b492
|
||||
010146121de7b4e8145707a9c1e4ca1a:f300e28d1808d53dc21695373e47b63b
|
||||
Batwoman S02E09 - Rule 1
|
||||
010075d63eb37e1c8cefb01fe1480bc1:45de633f9841312698535cce35b89e69
|
||||
010544c8ef36126b4ef2d23691348687:22268c051049c705538ef008923c6fea
|
||||
010212d6be72ea1061b0ecc51030d37c:13c731c1e293bfecd99d53f3f9365a12
|
||||
010137223021b950c1f0f6c9c490ba85:4c2abd22a6230d15959fedbcd6279bb6
|
||||
Batwoman S02E10 - Time Off for Good Behavior
|
||||
010085342cff58ddfedaf55d11b5e7b3:b6b52880e492c945e876d873b66cf65e
|
||||
010587e645752c67b65c55ed2edd2e9e:33975f1b9a5d240de589b405e6e5747d
|
||||
01020d63aeee2f671edbe5095245f0f4:ae95ccaf09ecff58ee85b9bd0f7d0cdb
|
||||
0101ce423176a0c35b8bf3ae2f3f5d23:25ddd879474fa73d33c60ce6737e5c29
|
||||
Batwoman S02E11 - Arrive Alive
|
||||
01007f4ec4b3d8b75800804c17b592ea:668c05c2794fa19f0b04b1f892b7dc42
|
||||
0105802fecfb861e1d9737d131532114:9e3c7395dd581298526ebcb61a7c92fd
|
||||
010239033eb000df46e69eb432549eb0:a08ee9f02ab8888f364d13e157f448ef
|
||||
01015bb29f71f8ff1b7b144a8ab05bc0:8c99cdc981fe0e1f9f8e8a71bccffaff
|
||||
Batwoman S02E12 - Initiate SelfDestruct
|
||||
01004d5bbd5243775c78166fa0b89df6:16516f29315498b6f2e9560307fda7a1
|
||||
0105de80a6ff76c7eaef30a72d56cdc8:19a58cf6c3e652ee9bd48642c176d86e
|
||||
010202d7403ad7de57669411bec8f6f1:3fc4d134442ebf69feb06df300b5d016
|
||||
010196bb776270cc0572ba965646d6b5:f7862b33719a782397e78b9f247aad4e
|
||||
Batwoman S02E13 - Ill Give You a Clue
|
||||
0100fb47d5ca8cdfb21bb659e262bee5:10f38081af0bda73c82443328cb49703
|
||||
0105af1b452522d5f5440768bfd0d56b:254be27bb967020e68c9a5d5d73e38d5
|
||||
0102ec412eefb75828dc9a72e583009c:da566dfaaeee1ece0af84f6d30f0edc4
|
||||
0101cb4d2d717d2253c43ee8d9b60a04:4d0076282cf7a9be56995a8c0f8d8bfe
|
||||
Batwoman S02E14 - And Justice for All
|
||||
01008877409569af4a2ead433f5af718:6ccaffc68f3a6aa2aaf98b8496b0ecbb
|
||||
0105fa00a369032379aa208fb46fc303:99121febd1ae9cdf2c472a68cc1b8e5b
|
||||
0102e3230f21c8bff8004740d89645cb:751687d4340d81c174d0f712d5dbcae0
|
||||
0101ebdbd11b670e91c268874c3109b0:7e4f10f5a0790c62ebe9dc4d5de9d33e
|
||||
Batwoman S02E15 - Armed Suspect
|
||||
01006ba0ea1f70084983e770c630bb36:c5f58a392516ac21ea1300ea04a669e5
|
||||
01055bd10b9f06d738e3d9a2b13edc7c:d492ed34fa7149844c7bf03c8ae47721
|
||||
0102b306556fb84582aa409ac4b6a584:281d9dd82d8843ca41f7092670d48800
|
||||
010152228cd2e39ea973f88d973dcd62:d93bdeb9dde3737e90a8fafc4df21427
|
||||
Batwoman S02E16 - Rebirth
|
||||
010021bc64ac431f4df958583a483201:086f250bf075edd18a24e8fa6f8ed65b
|
||||
01052ab94f1f64018300d1bb3077802d:91d163a8aacc0544e70f7eea763b3833
|
||||
0102089519e709cba16179a3ca398904:e0a082c1b0e34fd3cabc73265993ffd2
|
||||
010108362bd69113b4d27d7d986eb055:c683ed40543517865cdcf850c7ca5822
|
||||
Batwoman S02E17 - Kane Kate
|
||||
0100e13e59e28292dfe6e08991c0e61e:24f3a32b3ef62df60777e75baf540cde
|
||||
01056dce7c173e82c3eca7ed6f1d2b75:91ab12140d5c3ccaf4561fb3eab4213d
|
||||
010246b98e624bf1bb31458f857c22a8:97f45fd8d9c60d1e6b6f5591abae8d1c
|
||||
0101f682ea4a056ba5511f777c82fd0d:f6f9b45c4ad53548fafc97d1e309e5cc
|
||||
Batwoman S02E18 - Power
|
||||
0100c5624fc4fa47b5c771ba1f0879de:d5410e4831ba98d33ff18303aee40227
|
||||
0105eb5c07d078585c774abe909ca767:d3e9ddd1ec2bba81db00179c251571b5
|
||||
01028220ac9cd86cf7bbd47c6393b597:e329fc2c41fc6221a8eb84964c9f1c85
|
||||
0101b59a867c9c344c33bb9564a7812c:ff99c5cbca7e80dffedb4148bc21e5d5
|
|
@ -0,0 +1,31 @@
|
|||
##### One KEY per line. #####
|
||||
Un Lugar en Silencio (2018)
|
||||
32c7ac2797ae4ca8990cdb917f418a28:9363b26f7c599512db7f33b2eda3eeae
|
||||
Un Lugar en Silencio (2018)
|
||||
32c7ac2797ae4ca8990cdb917f418a28:9363b26f7c599512db7f33b2eda3eeae
|
||||
Sabrina the Teenage Witch - Piloto S01E01 - Piloto
|
||||
540009bd9b0f4e1bb4655cf7c0a30ab6:bbbc41bb980fbf4134eec5d9288d676d
|
||||
Sabrina the Teenage Witch - Viernes De Panque S01E02 - Viernes De Panque
|
||||
78fa06031ee94c2f9b1a7feebefd9dd1:86d96275eb806d5f3156ca1a45c30251
|
||||
Sabrina the Teenage Witch - Viernes De Panque S01E02 - Viernes De Panque
|
||||
78fa06031ee94c2f9b1a7feebefd9dd1:86d96275eb806d5f3156ca1a45c30251
|
||||
Sabrina the Teenage Witch - Viernes De Panque S01E02 - Viernes De Panque
|
||||
78fa06031ee94c2f9b1a7feebefd9dd1:86d96275eb806d5f3156ca1a45c30251
|
||||
Familias De La Mafia S01E02 - Parte II
|
||||
69e84b2e7fee46bc9630c7b4e8256d79:247b446e06c2d83d1ec70cb5cb2b50a2
|
||||
Familias De La Mafia S01E01 - Parte I
|
||||
db6595d6f64444208e0e0cc2080f2d50:89ad3c24568875c667293edb841eb74b
|
||||
Familias De La Mafia S01E01 - Parte I
|
||||
db6595d6f64444208e0e0cc2080f2d50:89ad3c24568875c667293edb841eb74b
|
||||
Los Padrinos Mágicos S05E01 - Día De Mudanzas Secreto De Familia
|
||||
4805cc1c5c554ec7b009c5203f559e50:066732c2c49451ecc13c3e1a68c4a376
|
||||
Los Padrinos Mágicos S05E02 - Mi Hermano Mayor Qué Diferencia Hay
|
||||
5634bd83530a4352a9eb05cbe3a17807:8ac3a06198dff4e5ec4155fd30b76841
|
||||
Los Padrinos Mágicos S05E03 - Un Ataque De Inteligencia La Academia Del Ocio
|
||||
f204b48ccb08400e87c3ec1b76a4954e:6adabdf7f867be3a42b0690d4b390739
|
||||
iCarly! S01E101 - Un Nuevo Comienzo
|
||||
41432623869945ee962c02bcac0109ea:baf729e875fb5431a2f2059189c54e60
|
||||
Acapulco Shore S08E14 - Invitadas Sorpresa
|
||||
873043e2654246fd9fc51fea9833b0c5:fc6e03c6222a77d760dbeff5869960eb
|
||||
Acapulco Shore S08E15 - La Fiesta Final
|
||||
31a46457e5784ceba40fe333b093d758:d1cd078dd97676c2fc787976882c72ec
|
|
@ -0,0 +1,6 @@
|
|||
python bad37.py --url httpsplay.hbomax.comfeatureurnhbofeatureGX9KHPw1OIMPCJgEAAAAD --alang es-la en --slang es-la en -q 1080p
|
||||
pause
|
||||
|
||||
para blim descargar la mejor calidad es el comando asi
|
||||
|
||||
python bad37.py --url https://www.blim.com/asset/7607 -s 10 -c playready -e 1
|
40
README.md
40
README.md
|
@ -1,2 +1,38 @@
|
|||
# HBO-MAX 4k Downloader
|
||||
Download from HMAX
|
||||
|
||||
<div size='20px'> #HBO MAX- BlimTV -Paramount plus 4K Downloader
|
||||
</div>
|
||||
|
||||
<div size='20px'> Tool To download 4K HDR DV SDR from HBO MAX- BlimTV -Paramount plus
|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<img width="200" src="https://github.com/Kathryn-Jie/Kathryn-Jie/blob/main/kathryn.png">
|
||||
</p>
|
||||
|
||||
<h1> Hello Fellow < Developers/ >! <img src = "https://raw.githubusercontent.com/MartinHeinz/MartinHeinz/master/wave.gif" width = 30px> </h1>
|
||||
<p align='center'>
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
<div size='20px'> Hi! My name is WVDUMP. I am Leaking the scripts to punish few idiots :smile:
|
||||
</div>
|
||||
|
||||
<h2> About Me <img src = "https://media0.giphy.com/media/KDDpcKigbfFpnejZs6/giphy.gif?cid=ecf05e47oy6f4zjs8g1qoiystc56cu7r9tb8a1fe76e05oty&rid=giphy.gif" width = 100px></h2>
|
||||
|
||||
<img width="55%" align="right" alt="Github" src="https://raw.githubusercontent.com/onimur/.github/master/.resources/git-header.svg" />
|
||||
|
||||
- 🔭 I’m currently working on Java scripts
|
||||
|
||||
- 🌱 I’m currently learning Python
|
||||
|
||||
- 👯 Sharing is caring
|
||||
|
||||
|
||||
- ⚡ CDM IS NOT INCLUDED BUY it from wvfuck@cyberfiends.net ⚡
|
||||
|
||||
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
|
||||
|
|
|
@ -0,0 +1,128 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Module: BAD Project
|
||||
# Created on: 01-06-2021
|
||||
# Authors: JUNi
|
||||
# Version: 1.0
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
#Common:
|
||||
parser.add_argument("content", nargs="?", help="Content URL or ID")
|
||||
parser.add_argument("--url", dest="url_season", help="If set, it will download all assets from the season provided.")
|
||||
parser.add_argument("--tqdm", dest="tqmd_mode", help="If set, will download with threading", action="store_true")
|
||||
parser.add_argument("--nv", "--no-video", dest="novideo", help="If set, don't download video", action="store_true")
|
||||
parser.add_argument("--na", "--no-audio", dest="noaudio", help="If set, don't download audio", action="store_true")
|
||||
parser.add_argument("--ns", "--no-subs", dest="nosubs", help="If set, don't download subs", action="store_true")
|
||||
parser.add_argument("--all-season", dest="all_season", help="If set, active download mode.", action="store_true")
|
||||
parser.add_argument("-e", "--episode", dest="episodeStart", help="If set, it will start downloading the season from that episode.")
|
||||
parser.add_argument("-s", dest="season", help="If set, it will download all assets from the season provided.")
|
||||
parser.add_argument("--tag", type=str, required=False, help="Release group tag to use for filenames")
|
||||
parser.add_argument("-q", "--quality", dest="customquality", type=lambda x: [x.rstrip('p')], help="For configure quality of video.", default=[])
|
||||
parser.add_argument("-o", "--output", dest="output", default="downloads", help="If set, it will download all assets to directory provided.")
|
||||
parser.add_argument("--keep", dest="keep", help="If set, it will list all formats available.", action="store_true")
|
||||
parser.add_argument("--info", help="If set, it will print manifest infos and exit.", action="store_true")
|
||||
parser.add_argument("--no-mux", dest="nomux", help="If set, dont mux.", action="store_true")
|
||||
#parser.add_argument("--force-mux", dest="force_mux", nargs=1, help="If set, force mux.", default=[])
|
||||
#parser.add_argument("--langtag", dest="langtag", nargs=1, help="For configure language tag of MKV.", default=[])
|
||||
parser.add_argument("--only-2ch-audio", dest="only_2ch_audio", help="If set, no clean tag subtitles.", action="store_true")
|
||||
parser.add_argument("--alang", "--audio-language", dest="audiolang", nargs="*", help="If set, download only selected audio languages", default=[])
|
||||
parser.add_argument("--slang", "--subtitle-language", dest="sublang", nargs="*", help="If set, download only selected subtitle languages", default=[])
|
||||
parser.add_argument("--flang", "--forced-language", dest="forcedlang", nargs="*", help="If set, download only selected forced subtitle languages", default=[])
|
||||
parser.add_argument("--no-cleansubs", dest="nocleansubs", help="If set, no clean tag subtitles.", action="store_true")
|
||||
parser.add_argument("--hevc", dest="hevc", help="If set, it will return HEVC manifest", action="store_true")
|
||||
parser.add_argument("--uhd", dest="uhd", help="If set, it will return UHD manifest", action="store_true")
|
||||
parser.add_argument("--license", dest="license", help="Only print keys, don't download", action="store_true")
|
||||
parser.add_argument("-licenses-as-json", help="Save the wv keys as json instead", action="store_true")
|
||||
parser.add_argument("--debug", action="store_true", help="Enable debug logging")
|
||||
parser.add_argument("--aformat-51ch", "--audio-format-51ch", dest="aformat_51ch", help="For configure format of audio.")
|
||||
parser.add_argument("--nc", "--no-chapters", dest="nochpaters", help="If set, don't download chapters", action="store_true")
|
||||
parser.add_argument("-c", "--codec", choices=["widevine", "playready"], help="Video type to download", default="widevine")
|
||||
|
||||
parser.add_argument("--only-keys", dest="onlykeys", help="Only print keys, don't download", action="store_true")
|
||||
|
||||
#HBOMAX
|
||||
parser.add_argument("--atmos", dest="atmos", help="If set, it will return Atmos MPDs", action="store_true")
|
||||
parser.add_argument("--ad", "--desc-audio", action="store_true", dest="desc_audio", help="Download descriptive audio instead of normal dialogue")
|
||||
parser.add_argument("--hdr", dest="hdr", help="If set, it will return HDR manifest", action="store_true")
|
||||
parser.add_argument("-r", "--region", choices=["la", "us"], help="HBO Max video region", default="la")
|
||||
parser.add_argument("--vp", dest="videocodec", default="h264", choices=["h264", "hevc", "hdr"], help="video codec profile")
|
||||
|
||||
#Clarovideo:
|
||||
parser.add_argument("--m3u8", dest="m3u8mode", help="If set, it will return M3U8 manifest", action="store_true")
|
||||
parser.add_argument("--file", dest="txtpath", help="If set, it will download links of an txt file")
|
||||
|
||||
#DisneyPlus:
|
||||
parser.add_argument("--tlang", "--title-language", dest="titlelang", nargs=1, help="If set, it will change title language", default="es-la")
|
||||
parser.add_argument("--scenario", dest="scenario", help="Video API from DisneyPlus", default="chromecast-drm-cbcs")
|
||||
|
||||
#PROXY:
|
||||
parser.add_argument("--proxy", dest="proxy", help="Proxy URL to use for both fetching metadata and downloading")
|
||||
#proxy format: http://email@email:password@host:port
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
if args.debug:
|
||||
import logging
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
currentFile = '__main__'
|
||||
realPath = os.path.realpath(currentFile)
|
||||
dirPath = os.path.dirname(realPath)
|
||||
dirName = os.path.basename(dirPath)
|
||||
|
||||
if __name__ == "__main__":
|
||||
if args.content:
|
||||
args.url_season = args.content
|
||||
|
||||
if not args.url_season:
|
||||
print('Please specify the URL of the content to download.')
|
||||
sys.exit(1)
|
||||
|
||||
if (args.url_season and 'hbomax' in args.url_season):
|
||||
mode = 'hbomax'
|
||||
import hbomax
|
||||
hbomax.main(args)
|
||||
elif (args.url_season and 'clarovideo' in args.url_season):
|
||||
mode = 'clarovideo'
|
||||
import clarovideo
|
||||
clarovideo.main(args)
|
||||
elif (args.url_season and 'blim' in args.url_season):
|
||||
mode = 'blimtv'
|
||||
import blimtv
|
||||
blimtv.main(args)
|
||||
elif (args.url_season and 'nowonline' in args.url_season):
|
||||
mode = 'nowonline'
|
||||
import nowonline
|
||||
nowonline.main(args)
|
||||
elif (args.url_season and 'globo' in args.url_season):
|
||||
mode = 'globoplay'
|
||||
import globoplay
|
||||
globoplay.main(args)
|
||||
elif (args.url_season and 'paramountplus.com' in args.url_season):
|
||||
mode = 'paramountplus'
|
||||
import paramountplus
|
||||
paramountplus.main(args)
|
||||
elif (args.url_season and 'disneyplus' in args.url_season):
|
||||
mode = 'disneyplus'
|
||||
import disneyplus
|
||||
disneyplus.main(args)
|
||||
elif (args.url_season and 'prende.tv' in args.url_season):
|
||||
mode = 'prendetv'
|
||||
import prendetv
|
||||
prendetv.main(args)
|
||||
elif (args.url_season and 'tv.apple.com' in args.url_season):
|
||||
mode = 'appletv'
|
||||
import appletv
|
||||
appletv.main(args)
|
||||
elif (args.url_season and 'wink' in args.url_season):
|
||||
mode = 'wink'
|
||||
import wink
|
||||
wink.main(args)
|
||||
|
||||
else:
|
||||
print("Error! This url or mode is not recognized.")
|
||||
sys.exit(0)
|
|
@ -0,0 +1,594 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Module: Blim Downloader
|
||||
# Created on: 26-11-2020
|
||||
# Authors: JUNi
|
||||
# Version: 3.5
|
||||
|
||||
import requests
|
||||
import subprocess
|
||||
import xmltodict
|
||||
import ffmpy
|
||||
import time, shutil
|
||||
import glob, json
|
||||
import sys, os, re
|
||||
import isodate
|
||||
import oauthlib
|
||||
from oauthlib import oauth1
|
||||
from subprocess import Popen
|
||||
from titlecase import titlecase
|
||||
from unidecode import unidecode
|
||||
from pymediainfo import MediaInfo
|
||||
from os.path import isfile, join
|
||||
|
||||
import pywidevine.clients.blim.manifest_parse as manifestParse
|
||||
import pywidevine.clients.blim.client as blim_client
|
||||
import pywidevine.clients.blim.config as blim_cfg
|
||||
from pywidevine.clients.proxy_config import ProxyConfig
|
||||
from pywidevine.muxer.muxer import Muxer
|
||||
|
||||
from pywidevine.clients.blim.downloader_pr import PrDownloader
|
||||
from pywidevine.clients.blim.downloader_wv import WvDownloader
|
||||
from pywidevine.clients.blim.config import PrDownloaderConfig
|
||||
from pywidevine.clients.blim.config import WvDownloaderConfig
|
||||
|
||||
currentFile = 'blimtv'
|
||||
realPath = os.path.realpath(currentFile)
|
||||
dirPath = os.path.dirname(realPath)
|
||||
|
||||
def main(args):
|
||||
|
||||
SubtitleEditexe = shutil.which("subtitleedit") or shutil.which("SubtitleEdit")
|
||||
|
||||
proxies = {}
|
||||
proxy_meta = args.proxy
|
||||
if proxy_meta == 'none':
|
||||
proxies['meta'] = {'http': None, 'https': None}
|
||||
elif proxy_meta:
|
||||
proxies['meta'] = {'http': proxy_meta, 'https': proxy_meta}
|
||||
SESSION = requests.Session()
|
||||
SESSION.proxies = proxies.get('meta')
|
||||
proxy_cfg = ProxyConfig(proxies)
|
||||
|
||||
def alphanumericSort(l):
|
||||
def convert(text):
|
||||
if text.isdigit():
|
||||
return int(text)
|
||||
else:
|
||||
return text
|
||||
|
||||
def alphanum_key(key):
|
||||
return [convert(c) for c in re.split('([0-9]+)', key)]
|
||||
|
||||
return sorted(l, key=alphanum_key)
|
||||
|
||||
def convert_size(size_bytes):
|
||||
if size_bytes == 0:
|
||||
return '0bps'
|
||||
else:
|
||||
s = round(size_bytes / 1000, 0)
|
||||
return '%ikbps' % s
|
||||
|
||||
def get_size(size):
|
||||
power = 1024
|
||||
n = 0
|
||||
Dic_powerN = {0:'', 1:'K', 2:'M', 3:'G', 4:'T'}
|
||||
while size > power:
|
||||
size /= power
|
||||
n += 1
|
||||
return str(round(size, 2)) + Dic_powerN[n] + 'B'
|
||||
|
||||
def find_str(s, char):
|
||||
index = 0
|
||||
if char in s:
|
||||
c = char[0]
|
||||
for ch in s:
|
||||
if ch == c:
|
||||
if s[index:index + len(char)] == char:
|
||||
return index
|
||||
index += 1
|
||||
|
||||
return -1
|
||||
|
||||
def getKeyId(name):
|
||||
mp4dump = subprocess.Popen([blim_cfg.MP4DUMP, name], stdout=(subprocess.PIPE))
|
||||
mp4dump = str(mp4dump.stdout.read())
|
||||
A = find_str(mp4dump, 'default_KID')
|
||||
KEY_ID_ORI = ''
|
||||
KEY_ID_ORI = mp4dump[A:A + 63].replace('default_KID = ', '').replace('[', '').replace(']', '').replace(' ', '')
|
||||
if KEY_ID_ORI == '' or KEY_ID_ORI == "'":
|
||||
KEY_ID_ORI = 'nothing'
|
||||
return KEY_ID_ORI
|
||||
|
||||
def replace_words(x):
|
||||
x = re.sub(r'[]¡!"#$%\'()*+,:;<=>¿?@\\^_`{|}~[-]', '', x)
|
||||
return unidecode(x)
|
||||
|
||||
def downloadFile2(link, file_name):
|
||||
with open(file_name, 'wb') as (f):
|
||||
print('\nDownloading %s' % file_name)
|
||||
response = requests.get(link, stream=True)
|
||||
total_length = response.headers.get('content-length')
|
||||
if total_length is None:
|
||||
f.write(response.content)
|
||||
else:
|
||||
dl = 0
|
||||
total_length = int(total_length)
|
||||
for data in response.iter_content(chunk_size=4096):
|
||||
dl += len(data)
|
||||
f.write(data)
|
||||
done = int(50 * dl / total_length)
|
||||
sys.stdout.write('\r[%s%s]' % ('=' * done, ' ' * (50 - done)))
|
||||
sys.stdout.flush()
|
||||
|
||||
global folderdownloader
|
||||
if args.output:
|
||||
if not os.path.exists(args.output):
|
||||
os.makedirs(args.output)
|
||||
os.chdir(args.output)
|
||||
if ":" in str(args.output):
|
||||
folderdownloader = str(args.output).replace('/','\\').replace('.\\','\\')
|
||||
else:
|
||||
folderdownloader = dirPath + str(args.output).replace('/','\\').replace('.\\','\\')
|
||||
else:
|
||||
folderdownloader = dirPath.replace('/','\\').replace('.\\','\\')
|
||||
|
||||
def manifest_parse(manifest_url):
|
||||
r = SESSION.get(url=manifest_url)
|
||||
r.raise_for_status()
|
||||
xml = xmltodict.parse(r.text)
|
||||
manifest = json.loads(json.dumps(xml))
|
||||
if '.mpd' in manifest_url:
|
||||
length, video_list, audio_list, subs_list = manifestParse.get_mpd_list(manifest)
|
||||
base_url = manifest['MPD']['Period']['BaseURL']
|
||||
else:
|
||||
length, video_list, audio_list, subs_list = manifestParse.get_ism_list(manifest)
|
||||
base_url = manifest_url.split('Manifest')[0]
|
||||
|
||||
video_list = sorted(video_list, key=(lambda k: int(k['Bandwidth'])))
|
||||
height_all = []
|
||||
for x in video_list:
|
||||
height_all.append(x['Height'])
|
||||
|
||||
try:
|
||||
while args.customquality != [] and int(video_list[(-1)]['Height']) > int(args.customquality[0]):
|
||||
video_list.pop(-1)
|
||||
except Exception:
|
||||
video_list = []
|
||||
|
||||
if video_list == []:
|
||||
video_list = video_list
|
||||
args.novideo = True
|
||||
|
||||
audio_list = sorted(audio_list, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))), reverse=True)
|
||||
BitrateList = []
|
||||
AudioLanguageList = []
|
||||
for x in audio_list:
|
||||
BitrateList.append(x['Bandwidth'])
|
||||
AudioLanguageList.append(x['Language'])
|
||||
|
||||
BitrateList = alphanumericSort(list(set(BitrateList)))
|
||||
AudioLanguageList = alphanumericSort(list(set(AudioLanguageList)))
|
||||
audioList_new = []
|
||||
audio_Dict_new = {}
|
||||
for y in AudioLanguageList:
|
||||
counter = 0
|
||||
for x in audio_list:
|
||||
if x['Language'] == y and counter == 0:
|
||||
audio_Dict_new = {
|
||||
'Language':x['Language'],
|
||||
'Bandwidth':x['Bandwidth'],
|
||||
'ID':x['ID'],
|
||||
'Codec': x['Codec']}
|
||||
audioList_new.append(audio_Dict_new)
|
||||
counter = counter + 1
|
||||
|
||||
audioList = audioList_new
|
||||
audio_list = sorted(audioList, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))))
|
||||
|
||||
subs_list = []
|
||||
subsList_new = []
|
||||
if args.sublang:
|
||||
for x in subs_list:
|
||||
langAbbrev = x['Language']
|
||||
if langAbbrev in list(args.sublang):
|
||||
subsList_new.append(x)
|
||||
subs_list = subsList_new
|
||||
|
||||
return base_url, length, video_list, audio_list, subs_list, manifest
|
||||
|
||||
def get_episodes(ep_str, num_eps):
|
||||
eps = ep_str.split(',')
|
||||
eps_final = []
|
||||
|
||||
for ep in eps:
|
||||
if '-' in ep:
|
||||
(start, end) = ep.split('-')
|
||||
start = int(start)
|
||||
end = int(end or num_eps)
|
||||
eps_final += list(range(start, end + 1))
|
||||
else:
|
||||
eps_final.append(int(ep))
|
||||
|
||||
return eps_final
|
||||
|
||||
tokenIsOk = False
|
||||
os.makedirs(blim_cfg.COOKIES_FOLDER, exist_ok=True)
|
||||
BLIMLOGINDATA_FILE = join(blim_cfg.COOKIES_FOLDER, 'blim_login_data.json')
|
||||
SESSION, costumer_key, access_key_secret = blim_client.login(SESSION)
|
||||
|
||||
def get_auth_header(api_url):
|
||||
client = oauthlib.oauth1.Client(costumer_key, client_secret=access_key_secret)
|
||||
uri, auth_header, body = client.sign(api_url)
|
||||
return auth_header
|
||||
|
||||
def get_season(blim_id):
|
||||
season_req = requests.get(url=blim_cfg.ENDPOINTS['seasons'] + str(blim_id)).json()['data']
|
||||
|
||||
if 'episode' in season_req['category'] and args.season:
|
||||
blim_id = season_req['parentShow']['id']
|
||||
season_req = requests.get(url=blim_cfg.ENDPOINTS['seasons'] + str(blim_id)).json()['data']
|
||||
|
||||
if not args.season:
|
||||
args.season = 'all'
|
||||
|
||||
seasons = []
|
||||
if args.season:
|
||||
if args.season == 'all':
|
||||
seasons = 'all'
|
||||
elif ',' in args.season:
|
||||
seasons = [int(x) for x in args.season.split(',')]
|
||||
elif '-' in args.season:
|
||||
(start, end) = args.season.split('-')
|
||||
seasons = list(range(int(start), int(end) + 1))
|
||||
else:
|
||||
seasons = [int(args.season)]
|
||||
|
||||
if 'series' in season_req['category']:
|
||||
if seasons == 'all':
|
||||
seasons = [x['number'] for x in season_req['seasons']]
|
||||
|
||||
for season_num in seasons:
|
||||
episode_list = season_req['seasons'][int(season_num) - 1]['episodes']
|
||||
episodes_list_new = []
|
||||
for num, ep in enumerate(episode_list, start=1):
|
||||
episodes_list_new.insert(num - 0, {
|
||||
'id': ep['id'],
|
||||
'episode_num': num
|
||||
})
|
||||
episode_list = sorted(episodes_list_new, key=lambda x: x['episode_num'])
|
||||
|
||||
if args.episodeStart:
|
||||
eps = get_episodes(args.episodeStart, len(episode_list))
|
||||
episode_list = [x for x in episode_list if x['episode_num'] in eps]
|
||||
|
||||
for episode in episode_list:
|
||||
get_metadata(blim_id=episode['id'])
|
||||
|
||||
else:
|
||||
get_metadata(blim_id)
|
||||
|
||||
def get_metadata(blim_id):
|
||||
content_url = blim_cfg.ENDPOINTS['content'] + str(blim_id)
|
||||
info_json = requests.get(url=content_url, headers=get_auth_header(content_url), proxies=proxy_cfg.get_proxy('meta')).json()
|
||||
if 'episode' in info_json['data']['category']:
|
||||
blimType = "show"
|
||||
seriesTitles = info_json['data']['parentShow']['titleEditorial']
|
||||
seasonNumber = info_json['data']['parentSeason']['number']
|
||||
episodeNumber = info_json['data']['episodeNumber']
|
||||
episodeTitle = info_json['data']['titleEditorial']
|
||||
|
||||
if 'movie' in info_json['data']['category']:
|
||||
blimType = "movie"
|
||||
seriesTitles = info_json['data']['titleEditorial']
|
||||
releaseYearSearch = info_json['data']['airDate']
|
||||
releaseYear = re.search(r"^[0-9]{4}", releaseYearSearch)
|
||||
|
||||
if blimType=="movie":
|
||||
seriesName = replace_words(seriesTitles) + ' (' + releaseYear.group() + ')'
|
||||
folderName = None
|
||||
|
||||
if blimType=="show":
|
||||
seriesName = f'{replace_words(seriesTitles)} S{seasonNumber:02}E{episodeNumber:02} - {replace_words(episodeTitle)}'
|
||||
folderName = f'{replace_words(seriesTitles)} S{seasonNumber:02}'
|
||||
|
||||
start_process(get_manifest_url(info_json), seriesName, folderName, blimType)
|
||||
|
||||
codec = "mpd" if args.codec == "widevine" else "ss"
|
||||
|
||||
def get_manifest_url(api_json):
|
||||
video_json = api_json['data']['videos'][0]['files']
|
||||
for x in video_json:
|
||||
if x['type'] == codec:
|
||||
videoURL = x['path'].replace("AVOD.", "")
|
||||
if 'ss_629d09c4372f297f2760c820711c4d4737b14f26c25c55e58f1147819005089e' in videoURL or "468842.mpd" in videoURL:
|
||||
print("Lo sentimos, por el momento Blim no está disponible en tu país")
|
||||
sys.exit(0)
|
||||
break
|
||||
return videoURL
|
||||
|
||||
def get_drm_info(): # não está utilizando
|
||||
resp = requests.get(url=blim_cfg.ENDPOINTS['config'], proxies=proxy_cfg.get_proxy('meta')).json()
|
||||
wvlic = resp['widevineLicenseServer']
|
||||
wvcert = resp['widevineCertificateServer']
|
||||
return wvlic, wvcert
|
||||
|
||||
def start_process(manifest_url, seriesName, folderName, blimType):
|
||||
base_url, length, video_list, audio_list, subs_list, xml = manifest_parse(manifest_url)
|
||||
video_bandwidth = dict(video_list[(-1)])['Bandwidth']
|
||||
video_height = str(dict(video_list[(-1)])['Height'])
|
||||
video_width = str(dict(video_list[(-1)])['Width'])
|
||||
video_codec = str(dict(video_list[(-1)])['Codec'])
|
||||
video_format_id = str(dict(video_list[(-1)])['ID'])
|
||||
if not args.novideo:
|
||||
print('\nVIDEO - Bitrate: ' + convert_size(int(video_bandwidth)) + ' - Profile: ' + video_codec.split('=')[0] + ' - Size: ' + get_size(length * float(video_bandwidth) * 0.125) + ' - Dimensions: ' + video_width + 'x' + video_height)
|
||||
print()
|
||||
|
||||
if not args.noaudio:
|
||||
if audio_list != []:
|
||||
for x in audio_list:
|
||||
audio_bandwidth = x['Bandwidth']
|
||||
audio_representation_id = str(x['Codec'])
|
||||
audio_lang = x['Language']
|
||||
print('AUDIO - Bitrate: ' + convert_size(int(audio_bandwidth)) + ' - Profile: ' + audio_representation_id.split('=')[0] + ' - Size: ' + get_size(length * float(audio_bandwidth) * 0.125) + ' - Language: ' + audio_lang)
|
||||
print()
|
||||
|
||||
if not args.nosubs:
|
||||
if subs_list != []:
|
||||
for z in subs_list:
|
||||
sub_lang = str(dict(z)['Language'])
|
||||
print('SUBTITLE - Profile: Normal - Language: ' + sub_lang)
|
||||
print()
|
||||
|
||||
print('Name: ' + seriesName)
|
||||
|
||||
if blimType == 'show':
|
||||
CurrentName = seriesName
|
||||
CurrentHeigh = str(video_height)
|
||||
outputName = folderdownloader + '\\' + str(folderName) + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p].mkv'
|
||||
else:
|
||||
CurrentName = seriesName
|
||||
CurrentHeigh = str(video_height)
|
||||
outputName = folderdownloader + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p].mkv'
|
||||
|
||||
if 'ism' in manifest_url:
|
||||
if video_height == "1080":
|
||||
init_url = blim_cfg.init_files["1080p"]
|
||||
elif video_height == "480":
|
||||
init_url = blim_cfg.init_files["480p"]
|
||||
|
||||
if not os.path.isfile(outputName):
|
||||
|
||||
if not args.novideo:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p].mp4'
|
||||
if os.path.isfile(inputVideo):
|
||||
print('\n' + inputVideo + '\nFile has already been successfully downloaded previously.\n')
|
||||
else:
|
||||
if args.codec == 'playready':
|
||||
prdl_cfg = PrDownloaderConfig(xml, base_url, inputVideo, video_bandwidth, init_url, 'video')
|
||||
downloader = PrDownloader(prdl_cfg)
|
||||
else:
|
||||
wvdl_cfg = WvDownloaderConfig(xml, base_url, inputVideo, video_format_id, 'video/mp4')
|
||||
downloader = WvDownloader(wvdl_cfg)
|
||||
downloader.run()
|
||||
|
||||
if not args.noaudio:
|
||||
for x in audio_list:
|
||||
audio_lang = x['Language']
|
||||
inputAudio = seriesName + ' ' + '(' + audio_lang + ')' + '.mp4'
|
||||
inputAudio_demuxed = seriesName + ' ' + '(' + audio_lang + ')' + '.m4a'
|
||||
if os.path.isfile(inputAudio) or os.path.isfile(inputAudio_demuxed):
|
||||
print('\n' + inputAudio + '\nFile has already been successfully downloaded previously.\n')
|
||||
else:
|
||||
if args.codec == 'playready':
|
||||
prdl_cfg = PrDownloaderConfig(xml, base_url, inputAudio, x['Bandwidth'], blim_cfg.init_files["audio"], 'audio')
|
||||
downloader = PrDownloader(prdl_cfg)
|
||||
else:
|
||||
wvdl_cfg = WvDownloaderConfig(xml, base_url, inputAudio, x['ID'], 'audio/mp4')
|
||||
downloader = WvDownloader(wvdl_cfg)
|
||||
downloader.run()
|
||||
|
||||
if not args.nosubs:
|
||||
if subs_list != []:
|
||||
for z in subs_list:
|
||||
langAbbrev = str(dict(z)['Language'])
|
||||
inputSub = seriesName + " " + "(" + langAbbrev + ")"
|
||||
if os.path.isfile(inputSub + ".vtt") or os.path.isfile(inputSub + ".srt"):
|
||||
print("\n" + inputSub + "\nFile has already been successfully downloaded previously.\n")
|
||||
else:
|
||||
downloadFile2(str(dict(z)['File_URL']), inputSub + ".vtt")
|
||||
print('\nConverting subtitles...')
|
||||
SubtitleEdit_process = subprocess.Popen([SubtitleEditexe, "/convert", inputSub + ".vtt", "srt", "/fixcommonerrors", "/encoding:utf-8", "/RemoveLineBreaks"], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).wait()
|
||||
for f in glob.glob(inputSub + ".vtt"):
|
||||
os.remove(f)
|
||||
print("Done!")
|
||||
else:
|
||||
print ("\nNo subtitles available.")
|
||||
|
||||
CorrectDecryptVideo = False
|
||||
if not args.novideo:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p].mp4'
|
||||
if os.path.isfile(inputVideo):
|
||||
CorrectDecryptVideo = DecryptVideo(inputVideo=inputVideo, keys_video=blim_cfg.protection_keys)
|
||||
else:
|
||||
CorrectDecryptVideo = True
|
||||
|
||||
CorrectDecryptAudio = False
|
||||
if not args.noaudio:
|
||||
for x in audio_list:
|
||||
audio_lang = x['Language']
|
||||
inputAudio = seriesName + ' ' + '(' + audio_lang + ')' + '.mp4'
|
||||
if os.path.isfile(inputAudio):
|
||||
CorrectDecryptAudio = DecryptAudio(inputAudio=inputAudio, keys_audio=blim_cfg.protection_keys)
|
||||
else:
|
||||
CorrectDecryptAudio = True
|
||||
|
||||
if not args.nomux:
|
||||
if not args.novideo:
|
||||
if not args.noaudio:
|
||||
if CorrectDecryptVideo == True:
|
||||
if CorrectDecryptAudio == True:
|
||||
print('\nMuxing...')
|
||||
|
||||
if blimType=="show":
|
||||
MKV_Muxer=Muxer(CurrentName=CurrentName,
|
||||
SeasonFolder=folderName,
|
||||
CurrentHeigh=CurrentHeigh,
|
||||
Type=blimType,
|
||||
mkvmergeexe=blim_cfg.MKVMERGE)
|
||||
|
||||
else:
|
||||
MKV_Muxer=Muxer(CurrentName=CurrentName,
|
||||
SeasonFolder=None,
|
||||
CurrentHeigh=CurrentHeigh,
|
||||
Type=blimType,
|
||||
mkvmergeexe=blim_cfg.MKVMERGE)
|
||||
|
||||
MKV_Muxer.mkvmerge_muxer(lang="English")
|
||||
|
||||
if args.tag:
|
||||
inputName = CurrentName + ' [' + CurrentHeigh + 'p].mkv'
|
||||
release_group(base_filename=inputName,
|
||||
default_filename=CurrentName,
|
||||
folder_name=folderName,
|
||||
type=blimType,
|
||||
video_height=CurrentHeigh)
|
||||
|
||||
if not args.keep:
|
||||
for f in os.listdir():
|
||||
if re.fullmatch(re.escape(CurrentName) + r'.*\.(mp4|m4a|h264|h265|eac3|srt|txt|avs|lwi|mpd)', f):
|
||||
os.remove(f)
|
||||
print("Done!")
|
||||
else:
|
||||
print("\nFile '" + str(outputName) + "' already exists.")
|
||||
|
||||
def release_group(base_filename, default_filename, folder_name, type, video_height):
|
||||
if type=='show':
|
||||
video_mkv = os.path.join(folder_name, base_filename)
|
||||
else:
|
||||
video_mkv = base_filename
|
||||
|
||||
mediainfo = MediaInfo.parse(video_mkv)
|
||||
video_info = next(x for x in mediainfo.tracks if x.track_type == "Video")
|
||||
video_format = video_info.format
|
||||
|
||||
video_codec = ''
|
||||
if video_format == "AVC":
|
||||
video_codec = 'H.264'
|
||||
elif video_format == "HEVC":
|
||||
video_codec = 'H.265'
|
||||
|
||||
audio_info = next(x for x in mediainfo.tracks if x.track_type == "Audio")
|
||||
codec_name = audio_info.format
|
||||
channels_number = int(audio_info.other_channel_positions[0].split('/')[0])
|
||||
|
||||
audio_codec = ''
|
||||
audio_channels = ''
|
||||
if codec_name == "AAC":
|
||||
audio_codec = "AAC"
|
||||
elif codec_name == "AC-3":
|
||||
audio_codec = "DD"
|
||||
elif codec_name == "E-AC-3":
|
||||
audio_codec = "DDP"
|
||||
elif codec_name == "E-AC-3 JOC":
|
||||
audio_codec = "ATMOS"
|
||||
|
||||
if channels_number == "2":
|
||||
audio_channels = "2.0"
|
||||
elif channels_number == "6":
|
||||
audio_channels = "5.1"
|
||||
|
||||
audio_ = audio_codec + audio_channels
|
||||
|
||||
# renomear arquivo
|
||||
default_filename = default_filename.replace('&', '.and.')
|
||||
default_filename = re.sub(r'[]!"#$%\'()*+,:;<=>?@\\^_`{|}~[-]', '', default_filename)
|
||||
default_filename = default_filename.replace(' ', '.')
|
||||
default_filename = re.sub(r'\.{2,}', '.', default_filename)
|
||||
default_filename = unidecode(default_filename)
|
||||
|
||||
output_name = '{}.{}p.BLIM.WEB-DL.{}.{}-{}'.format(default_filename, video_height, audio_, video_codec, args.tag)
|
||||
if type=='show':
|
||||
outputName = os.path.join(folder_name, output_name + '.mkv')
|
||||
else:
|
||||
outputName = output_name + '.mkv'
|
||||
|
||||
os.rename(video_mkv, outputName)
|
||||
print("{} -> {}".format(base_filename, output_name))
|
||||
|
||||
def DecryptAudio(inputAudio, keys_audio):
|
||||
key_audio_id_original = getKeyId(inputAudio)
|
||||
outputAudioTemp = inputAudio.replace(".mp4", "_dec.mp4")
|
||||
if key_audio_id_original != "nothing":
|
||||
for key in keys_audio:
|
||||
key_id=key[0:32]
|
||||
if key_id == key_audio_id_original:
|
||||
print("\nDecrypting audio...")
|
||||
print ("Using KEY: " + key)
|
||||
wvdecrypt_process = subprocess.Popen([blim_cfg.MP4DECRYPT, "--show-progress", "--key", key, inputAudio, outputAudioTemp])
|
||||
stdoutdata, stderrdata = wvdecrypt_process.communicate()
|
||||
wvdecrypt_process.wait()
|
||||
time.sleep (50.0/1000.0)
|
||||
os.remove(inputAudio)
|
||||
print("\nDemuxing audio...")
|
||||
mediainfo = MediaInfo.parse(outputAudioTemp)
|
||||
audio_info = next(x for x in mediainfo.tracks if x.track_type == "Audio")
|
||||
codec_name = audio_info.format
|
||||
|
||||
ext = ''
|
||||
if codec_name == "AAC":
|
||||
ext = '.m4a'
|
||||
elif codec_name == "E-AC-3":
|
||||
ext = ".eac3"
|
||||
elif codec_name == "AC-3":
|
||||
ext = ".ac3"
|
||||
outputAudio = outputAudioTemp.replace("_dec.mp4", ext)
|
||||
print("{} -> {}".format(outputAudioTemp, outputAudio))
|
||||
ff = ffmpy.FFmpeg(executable=blim_cfg.FFMPEG, inputs={outputAudioTemp: None}, outputs={outputAudio: '-c copy'}, global_options="-y -hide_banner -loglevel warning")
|
||||
ff.run()
|
||||
time.sleep (50.0/1000.0)
|
||||
os.remove(outputAudioTemp)
|
||||
print("Done!")
|
||||
return True
|
||||
|
||||
elif key_audio_id_original == "nothing":
|
||||
return True
|
||||
|
||||
def DecryptVideo(inputVideo, keys_video):
|
||||
key_video_id_original = getKeyId(inputVideo)
|
||||
inputVideo = inputVideo
|
||||
outputVideoTemp = inputVideo.replace('.mp4', '_dec.mp4')
|
||||
outputVideo = inputVideo
|
||||
if key_video_id_original != 'nothing':
|
||||
for key in keys_video:
|
||||
key_id = key[0:32]
|
||||
if key_id == key_video_id_original:
|
||||
print('\nDecrypting video...')
|
||||
print('Using KEY: ' + key)
|
||||
wvdecrypt_process = subprocess.Popen([blim_cfg.MP4DECRYPT, '--show-progress', '--key', key, inputVideo, outputVideoTemp])
|
||||
stdoutdata, stderrdata = wvdecrypt_process.communicate()
|
||||
wvdecrypt_process.wait()
|
||||
print('\nRemuxing video...')
|
||||
ff = ffmpy.FFmpeg(executable=blim_cfg.FFMPEG, inputs={outputVideoTemp: None}, outputs={outputVideo: '-c copy'}, global_options='-y -hide_banner -loglevel warning')
|
||||
ff.run()
|
||||
time.sleep(0.05)
|
||||
os.remove(outputVideoTemp)
|
||||
print('Done!')
|
||||
return True
|
||||
|
||||
elif key_video_id_original == 'nothing':
|
||||
return True
|
||||
|
||||
def id_parse(x):
|
||||
if 'player' in args.url_season:
|
||||
id_ = args.url_season.split('/')[-2]
|
||||
else:
|
||||
id_ = args.url_season.split('/')[-1]
|
||||
return id_
|
||||
|
||||
blim_id = id_parse(args.url_season)
|
||||
|
||||
if 'player' in args.url_season:
|
||||
get_metadata(blim_id)
|
||||
else:
|
||||
get_season(blim_id)
|
||||
|
|
@ -0,0 +1,949 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Module: HBO Max Downloader
|
||||
# Created on: 04-11-2020
|
||||
# Version: 3.5
|
||||
|
||||
import sys, os
|
||||
import subprocess, re, base64, requests
|
||||
import xmltodict, isodate
|
||||
import time, glob, uuid, ffmpy, json
|
||||
import shutil, urllib.parse
|
||||
|
||||
from unidecode import unidecode
|
||||
|
||||
import pywidevine.clients.hbomax.config as HMAXConfig
|
||||
import pywidevine.clients.hbomax.client as HMAXClient
|
||||
|
||||
from pywidevine.clients.hbomax.config import HMAXRegion
|
||||
from pywidevine.clients.proxy_config import ProxyConfig
|
||||
from pywidevine.muxer.muxer import Muxer
|
||||
from os.path import join, isfile
|
||||
|
||||
currentFile = 'hbomax'
|
||||
realPath = os.path.realpath(currentFile)
|
||||
dirPath = os.path.dirname(realPath)
|
||||
SESSION = requests.session()
|
||||
USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36"
|
||||
|
||||
def main(args):
|
||||
|
||||
proxies = {}
|
||||
proxy_meta = args.proxy
|
||||
if proxy_meta == 'none':
|
||||
proxies['meta'] = {'http': None, 'https': None}
|
||||
elif proxy_meta:
|
||||
proxies['meta'] = {'http': proxy_meta, 'https': proxy_meta}
|
||||
SESSION.proxies = proxies.get('meta')
|
||||
proxy_cfg = ProxyConfig(proxies)
|
||||
|
||||
if not os.path.exists(dirPath + '/KEYS'):
|
||||
os.makedirs(dirPath + '/KEYS')
|
||||
else:
|
||||
keys_file = dirPath + '/KEYS/HBOMAX.txt'
|
||||
try:
|
||||
keys_file_hbomax = open(keys_file, 'r', encoding='utf8')
|
||||
keys_file_txt = keys_file_hbomax.readlines()
|
||||
except Exception:
|
||||
with open(keys_file, 'a', encoding='utf8') as (file):
|
||||
file.write('##### One KEY per line. #####\n')
|
||||
keys_file_hbomax = open(keys_file, 'r', encoding='utf8')
|
||||
keys_file_txt = keys_file_hbomax.readlines()
|
||||
|
||||
global folderdownloader
|
||||
if args.output:
|
||||
if not os.path.exists(args.output):
|
||||
os.makedirs(args.output)
|
||||
os.chdir(args.output)
|
||||
if ":" in str(args.output):
|
||||
folderdownloader = str(args.output).replace('/','\\').replace('.\\','\\')
|
||||
else:
|
||||
folderdownloader = dirPath + '\\' + str(args.output).replace('/','\\').replace('.\\','\\')
|
||||
else:
|
||||
folderdownloader = dirPath.replace('/','\\').replace('.\\','\\')
|
||||
|
||||
def downloadFile(aria2c_infile):
|
||||
aria2c_opts = [
|
||||
HMAXConfig.ARIA2C,
|
||||
'--allow-overwrite=true',
|
||||
'--download-result=hide',
|
||||
'--console-log-level=warn',
|
||||
'--enable-color=false',
|
||||
'-x16', '-s16', '-j16',
|
||||
'-i', aria2c_infile]
|
||||
subprocess.run(aria2c_opts, check=True)
|
||||
|
||||
def downloadFile2(link, file_name):
|
||||
with open(file_name, 'wb') as (f):
|
||||
print(file_name)
|
||||
response = SESSION.get(link, stream=True)
|
||||
total_length = response.headers.get('content-length')
|
||||
if total_length is None:
|
||||
f.write(response.content)
|
||||
else:
|
||||
dl = 0
|
||||
total_length = int(total_length)
|
||||
for data in response.iter_content(chunk_size=4096):
|
||||
dl += len(data)
|
||||
f.write(data)
|
||||
|
||||
def find_str(s, char):
|
||||
index = 0
|
||||
|
||||
if char in s:
|
||||
c = char[0]
|
||||
for ch in s:
|
||||
if ch == c:
|
||||
if s[index:index+len(char)] == char:
|
||||
return index
|
||||
|
||||
index += 1
|
||||
|
||||
return -1
|
||||
|
||||
def getKeyId(name):
|
||||
mp4dump = subprocess.Popen([HMAXConfig.MP4DUMP, name], stdout=subprocess.PIPE)
|
||||
mp4dump = str(mp4dump.stdout.read())
|
||||
A=find_str(mp4dump, "default_KID")
|
||||
KEY_ID_ORI=mp4dump[A:A+63].replace("default_KID = ", "").replace("[", "").replace("]", "").replace(" ", "")
|
||||
if KEY_ID_ORI == "":
|
||||
KEY_ID_ORI = "nothing"
|
||||
return KEY_ID_ORI
|
||||
|
||||
def mediainfo_(file):
|
||||
mediainfo_output = subprocess.Popen([HMAXConfig.MEDIAINFO, '--Output=JSON', '-f', file], stdout=subprocess.PIPE)
|
||||
mediainfo_json = json.load(mediainfo_output.stdout)
|
||||
return mediainfo_json
|
||||
|
||||
def replace_words(x):
|
||||
x = re.sub(r'[]¡!"#$%\'()*+,:;<=>¿?@\\^_`{|}~[-]', '', x)
|
||||
x = x.replace('\\', '').replace('/', ' & ')
|
||||
return unidecode(x)
|
||||
|
||||
def ReplaceCodeLanguages(X):
|
||||
X = X.lower()
|
||||
X = X.replace('_subtitle_dialog_0', '').replace('_narrative_dialog_0', '').replace('_caption_dialog_0', '').replace('_dialog_0', '').replace('_descriptive_0', '_descriptive').replace('_descriptive', '_descriptive').replace('_sdh', '-sdh').replace('es-es', 'es').replace('SPA', 'es').replace('en-es', 'es').replace('kn-in', 'kn').replace('gu-in', 'gu').replace('ja-jp', 'ja').replace('mni-in', 'mni').replace('si-in', 'si').replace('as-in', 'as').replace('ml-in', 'ml').replace('sv-se', 'sv').replace('hy-hy', 'hy').replace('sv-sv', 'sv').replace('da-da', 'da').replace('fi-fi', 'fi').replace('nb-nb', 'nb').replace('is-is', 'is').replace('uk-uk', 'uk').replace('hu-hu', 'hu').replace('bg-bg', 'bg').replace('hr-hr', 'hr').replace('lt-lt', 'lt').replace('et-et', 'et').replace('el-el', 'el').replace('he-he', 'he').replace('ar-ar', 'ar').replace('fa-fa', 'fa').replace('ENG', 'en').replace('ro-ro', 'ro').replace('sr-sr', 'sr').replace('cs-cs', 'cs').replace('sk-sk', 'sk').replace('mk-mk', 'mk').replace('hi-hi', 'hi').replace('bn-bn', 'bn').replace('ur-ur', 'ur').replace('pa-pa', 'pa').replace('ta-ta', 'ta').replace('te-te', 'te').replace('mr-mr', 'mr').replace('kn-kn', 'kn').replace('gu-gu', 'gu').replace('ml-ml', 'ml').replace('si-si', 'si').replace('as-as', 'as').replace('mni-mni', 'mni').replace('tl-tl', 'tl').replace('id-id', 'id').replace('ms-ms', 'ms').replace('vi-vi', 'vi').replace('th-th', 'th').replace('km-km', 'km').replace('ko-ko', 'ko').replace('zh-zh', 'zh').replace('ja-ja', 'ja').replace('ru-ru', 'ru').replace('tr-tr', 'tr').replace('it-it', 'it').replace('es-mx', 'es-la').replace('ar-sa', 'ar').replace('zh-cn', 'zh').replace('nl-nl', 'nl').replace('pl-pl', 'pl').replace('pt-pt', 'pt').replace('hi-in', 'hi').replace('mr-in', 'mr').replace('bn-in', 'bn').replace('te-in', 'te').replace('POR', 'pt').replace('cmn-hans', 'zh-hans').replace('cmn-hant', 'zh-hant').replace('ko-kr', 'ko').replace('en-au', 'en').replace('es-419', 'es-la').replace('es-us', 
'es-la').replace('en-us', 'en').replace('en-gb', 'en').replace('fr-fr', 'fr').replace('de-de', 'de').replace('las-419', 'es-la').replace('ar-ae', 'ar').replace('da-dk', 'da').replace('yue-hant', 'yue').replace('bn-in', 'bn').replace('ur-in', 'ur').replace('ta-in', 'ta').replace('sl-si', 'sl').replace('cs-cz', 'cs').replace('hi-jp', 'hi').replace('-001', '').replace('en-US', 'en').replace('deu', 'de').replace('eng', 'en').replace('ca-es', 'cat').replace('fil-ph', 'fil').replace('en-ca', 'en').replace('eu-es', 'eu').replace('ar-eg', 'ar').replace('he-il', 'he').replace('el-gr', 'he').replace('nb-no', 'nb').replace('es-ar', 'es-la').replace('en-ph', 'en').replace('sq-al', 'sq').replace('bs-ba', 'bs')
|
||||
return X
|
||||
|
||||
def alphanumericSort(l):
|
||||
def convert(text):
|
||||
if text.isdigit():
|
||||
return int(text)
|
||||
else:
|
||||
return text
|
||||
|
||||
def alphanum_key(key):
|
||||
return [convert(c) for c in re.split('([0-9]+)', key)]
|
||||
|
||||
return sorted(l, key=alphanum_key)
|
||||
|
||||
def convert_size(size_bytes):
|
||||
if size_bytes == 0:
|
||||
return '0bps'
|
||||
else:
|
||||
s = round(size_bytes / 1000, 0)
|
||||
return '%ikbps' % s
|
||||
|
||||
def get_size(size):
|
||||
power = 1024
|
||||
n = 0
|
||||
Dic_powerN = {0:'', 1:'K', 2:'M', 3:'G', 4:'T'}
|
||||
while size > power:
|
||||
size /= power
|
||||
n += 1
|
||||
return str(round(size, 2)) + Dic_powerN[n] + 'B'
|
||||
|
||||
global auth_url, content_url, license_wv
|
||||
|
||||
if args.region == "la":
|
||||
auth_url, content_url, license_wv = HMAXRegion.configHBOMaxLatam()
|
||||
|
||||
if args.region == "us":
|
||||
auth_url, content_url, license_wv = HMAXRegion.configHBOMaxUS()
|
||||
|
||||
def get_authorization_header(TOKEN):
|
||||
headers = HMAXConfig.get_user_headers()['headers']
|
||||
|
||||
headers = {
|
||||
"accept": "application/vnd.hbo.v9.full+json",
|
||||
"accept-encoding": "gzip, deflate, br",
|
||||
"accept-language": str(args.titlelang),
|
||||
"Authorization": f"Bearer {TOKEN}",
|
||||
"user-agent": HMAXConfig.UA,
|
||||
"x-hbo-client-version": "Hadron/50.40.0.111 desktop (DESKTOP)",
|
||||
"x-hbo-device-name": "desktop",
|
||||
"x-hbo-device-os-version": "undefined"
|
||||
}
|
||||
return headers
|
||||
|
||||
os.makedirs(HMAXConfig.COOKIES_FOLDER, exist_ok=True)
|
||||
HMAXTOKEN_FILE = join(HMAXConfig.COOKIES_FOLDER, 'hmax_login_data.json')
|
||||
if not isfile(HMAXTOKEN_FILE):
|
||||
access_token = HMAXClient.login(SESSION, auth_url, content_url)
|
||||
|
||||
def refresh_token():
|
||||
content = None
|
||||
TOKEN = False
|
||||
with open(HMAXTOKEN_FILE,'rb') as f:
|
||||
content = f.read().decode('utf-8')
|
||||
jso = json.loads(content)
|
||||
token_exp = int(time.time()) - jso["EXPIRATION_TIME"]
|
||||
if int(token_exp/60) > 15:
|
||||
TOKEN = False
|
||||
elif int(token_exp/60) < 15:
|
||||
TOKEN = True
|
||||
|
||||
if TOKEN:
|
||||
access_token = jso['ACCESS_TOKEN']
|
||||
|
||||
if not TOKEN:
|
||||
access_token = HMAXClient.login(SESSION, auth_url, content_url)
|
||||
return get_authorization_header(access_token)
|
||||
|
||||
def mpd_parse(mpd_url):
|
||||
if args.atmos:
|
||||
mpd_url = mpd_url.replace('_noatmos', '')
|
||||
base_url = mpd_url.rsplit('/', 1)[0] + '/'
|
||||
r = SESSION.get(url=mpd_url)
|
||||
xml = xmltodict.parse(r.text, force_list={
|
||||
'Period', 'AdaptationSet', 'ContentProtection'
|
||||
})
|
||||
mpd = json.loads(json.dumps(xml))
|
||||
period = mpd['MPD']['Period']
|
||||
tracks = []
|
||||
for pb in period:
|
||||
tracks = tracks + pb['AdaptationSet']
|
||||
|
||||
def get_height(width, height):
|
||||
if width == '1920':
|
||||
return '1080'
|
||||
elif width in ('1280', '1248'):
|
||||
return '720'
|
||||
else:
|
||||
return height
|
||||
|
||||
def force_instance(x):
|
||||
if isinstance(x['Representation'], list):
|
||||
X = x['Representation']
|
||||
else:
|
||||
X = [x['Representation']]
|
||||
return X
|
||||
|
||||
def get_pssh(track):
|
||||
pssh = ''
|
||||
for t in track.get('ContentProtection', {}):
|
||||
if (t['@schemeIdUri'].lower() == 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'
|
||||
and t.get('pssh', {}).get('#text')):
|
||||
pssh = t.get('pssh', {}).get('#text')
|
||||
return pssh
|
||||
|
||||
video_list = []
|
||||
for video_tracks in tracks:
|
||||
if video_tracks['@contentType'] == 'video':
|
||||
for x in video_tracks['Representation']:
|
||||
videoDict = {
|
||||
'Height':get_height(x['@width'], x['@height']),
|
||||
'Width':x['@width'],
|
||||
'Bandwidth':x['@bandwidth'],
|
||||
'ID':x['@id'],
|
||||
'Codec':x['@codecs'],
|
||||
'File_URL':x['BaseURL']}
|
||||
video_list.append(videoDict)
|
||||
video_list = sorted(video_list, key=(lambda k: int(k['Bandwidth'])))
|
||||
|
||||
if args.videocodec:
|
||||
if args.videocodec == 'h264':
|
||||
codec_s = 'avc1'
|
||||
if args.videocodec == 'hevc':
|
||||
codec_s = 'hvc1'
|
||||
if args.videocodec == 'hdr':
|
||||
codec_s = 'dvh1'
|
||||
|
||||
video_list_tmp = []
|
||||
for x in video_list:
|
||||
if codec_s in x['Codec']:
|
||||
video_list_tmp.append(x)
|
||||
video_list = video_list_tmp
|
||||
|
||||
while args.customquality != [] and int(video_list[(-1)]['Height']) > int(args.customquality[0]):
|
||||
video_list.pop(-1)
|
||||
|
||||
|
||||
audio_list = []
|
||||
for audio_tracks in tracks:
|
||||
if audio_tracks['@contentType'] == 'audio':
|
||||
isAD = False
|
||||
pssh = get_pssh(audio_tracks)
|
||||
try:
|
||||
if audio_tracks['Role']['@value']:
|
||||
isAD = True
|
||||
except KeyError:
|
||||
isAD = False
|
||||
|
||||
if isAD:
|
||||
lang_id = ReplaceCodeLanguages(audio_tracks["@lang"]) + '-ad'
|
||||
else:
|
||||
lang_id = ReplaceCodeLanguages(audio_tracks["@lang"])
|
||||
|
||||
for x in force_instance(audio_tracks):
|
||||
audio_dict = {
|
||||
'Bandwidth':x['@bandwidth'],
|
||||
'ID':x['@id'],
|
||||
'Language':lang_id,
|
||||
'Codec':x['@codecs'],
|
||||
'Channels':x['AudioChannelConfiguration']['@value'],
|
||||
'File_URL':x['BaseURL'],
|
||||
'isAD':isAD}
|
||||
audio_list.append(audio_dict)
|
||||
|
||||
audio_list = sorted(audio_list, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))), reverse=True)
|
||||
|
||||
if args.only_2ch_audio:
|
||||
c = 0
|
||||
while c != len(audio_list):
|
||||
if '-3' in audio_list[c]['Codec'].split('=')[0]:
|
||||
audio_list.remove(audio_list[c])
|
||||
else:
|
||||
c += 1
|
||||
|
||||
if args.desc_audio:
|
||||
c = 0
|
||||
while c != len(audio_list):
|
||||
if not audio_list[c]['isAD']:
|
||||
audio_list.remove(audio_list[c])
|
||||
else:
|
||||
c += 1
|
||||
else:
|
||||
c = 0
|
||||
while c != len(audio_list):
|
||||
if audio_list[c]['isAD']:
|
||||
audio_list.remove(audio_list[c])
|
||||
else:
|
||||
c += 1
|
||||
|
||||
BitrateList = []
|
||||
AudioLanguageList = []
|
||||
for x in audio_list:
|
||||
BitrateList.append(x['Bandwidth'])
|
||||
AudioLanguageList.append(x['Language'])
|
||||
|
||||
BitrateList = alphanumericSort(list(set(BitrateList)))
|
||||
AudioLanguageList = alphanumericSort(list(set(AudioLanguageList)))
|
||||
audioList_new = []
|
||||
audio_Dict_new = {}
|
||||
for y in AudioLanguageList:
|
||||
counter = 0
|
||||
for x in audio_list:
|
||||
if x['Language'] == y and counter == 0:
|
||||
audio_Dict_new = {
|
||||
'Language':x['Language'],
|
||||
'Bandwidth':x['Bandwidth'],
|
||||
'Codec': x['Codec'],
|
||||
'Channels': x['Channels'],
|
||||
'File_URL':x['File_URL'],
|
||||
'isAD':x['isAD']
|
||||
}
|
||||
audioList_new.append(audio_Dict_new)
|
||||
counter = counter + 1
|
||||
|
||||
audioList = audioList_new
|
||||
audio_list = sorted(audioList, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))))
|
||||
|
||||
audioList_new = []
|
||||
if args.audiolang:
|
||||
for x in audio_list:
|
||||
langAbbrev = x['Language']
|
||||
if langAbbrev in list(args.audiolang):
|
||||
audioList_new.append(x)
|
||||
audio_list = audioList_new
|
||||
|
||||
return (video_list, audio_list, pssh, base_url)
|
||||
|
||||
def get_episodes(ep_str, num_eps):
|
||||
eps = ep_str.split(',')
|
||||
eps_final = []
|
||||
|
||||
for ep in eps:
|
||||
if '-' in ep:
|
||||
(start, end) = ep.split('-')
|
||||
start = int(start)
|
||||
end = int(end or num_eps)
|
||||
eps_final += list(range(start, end + 1))
|
||||
else:
|
||||
eps_final.append(int(ep))
|
||||
|
||||
return eps_final
|
||||
|
||||
def get_season(series_id):
|
||||
seasons = []
|
||||
if args.season:
|
||||
if args.season == 'all':
|
||||
seasons = 'all'
|
||||
elif ',' in args.season:
|
||||
seasons = [int(x) for x in args.season.split(',')]
|
||||
elif '-' in args.season:
|
||||
(start, end) = args.season.split('-')
|
||||
seasons = list(range(int(start), int(end) + 1))
|
||||
else:
|
||||
seasons = [int(args.season)]
|
||||
|
||||
season_req = SESSION.post(url=content_url, headers=refresh_token(), json=[{"id":series_id}], proxies=proxy_cfg.get_proxy('meta')).json()[0]['body']
|
||||
try:
|
||||
if seasons == 'all':
|
||||
seasons = [num for num, season in enumerate(season_req['references']['seasons'], start=1)]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for season_num in seasons:
|
||||
if args.all_season:
|
||||
episode_list = season_req['references']['episodes']
|
||||
else:
|
||||
try:
|
||||
season_id = season_req['references']['seasons'][int(season_num)-1]
|
||||
episode_req = SESSION.post(url=content_url, headers=refresh_token(), json=[{"id":season_id}], proxies=proxy_cfg.get_proxy('meta')).json()[0]['body']
|
||||
episode_list = episode_req['references']['episodes']
|
||||
except KeyError:
|
||||
episode_list = season_req['references']['episodes']
|
||||
|
||||
episodes_list_new = []
|
||||
for num, ep in enumerate(episode_list, start=1):
|
||||
episodes_list_new.insert(num - 0, {
|
||||
'id': ep,
|
||||
'episode_num': num})
|
||||
episode_list = sorted(episodes_list_new, key=lambda x: x['episode_num'])
|
||||
|
||||
if args.episodeStart:
|
||||
eps = get_episodes(args.episodeStart, len(episode_list))
|
||||
episode_list = [x for x in episode_list if x['episode_num'] in eps]
|
||||
|
||||
for episode in episode_list:
|
||||
get_metadata(content_id=episode['id'])
|
||||
|
||||
def get_video_id(content_id):
|
||||
video_id = 'preview'
|
||||
while 'preview' in video_id:
|
||||
video_resp = SESSION.post(url=content_url, headers=refresh_token(), json=HMAXClient.get_video_payload(content_id)).json()
|
||||
if video_resp[0]["statusCode"] > 200:
|
||||
print(video_resp[0]['body']['message'])
|
||||
exit(1)
|
||||
video_id = [item['body']['references']['video'] for (i, item) in enumerate(video_resp) if 'video' in item['body']['references']][0]
|
||||
mpd_url, length, subs_list, chapters = get_infos(video_id)
|
||||
return video_resp[0]['body'], mpd_url, length, subs_list, chapters
|
||||
|
||||
def get_infos(video_id):
|
||||
video_json = SESSION.post(url=content_url, headers=refresh_token(), json=HMAXClient.get_video_payload(video_id)).json()[0]['body']
|
||||
try:
|
||||
mpd_url = video_json['fallbackManifest']
|
||||
except KeyError:
|
||||
mpd_url = video_json['manifest']
|
||||
for x in video_json['videos']:
|
||||
if x['type'] == 'urn:video:main':
|
||||
length = float(x['duration'])
|
||||
return mpd_url, length, get_subtitles(video_json), get_chapters(video_json)
|
||||
|
||||
def get_chapters(video_json):
|
||||
chapters = []
|
||||
for x in video_json['videos']:
|
||||
if 'annotations' in x:
|
||||
for (i, chapter) in enumerate(x['annotations']):
|
||||
secs, ms = divmod(chapter['start'], 1)
|
||||
mins, secs = divmod(secs, 60)
|
||||
hours, mins = divmod(mins, 60)
|
||||
ms = ms * 10000;
|
||||
chapter_time = '%02d:%02d:%02d.%04d' % (hours, mins, secs, ms)
|
||||
chapters.append({'TEXT':chapter['secondaryType'], 'TIME': chapter_time})
|
||||
return chapters
|
||||
|
||||
def get_subtitles(video_json):
|
||||
subs_list = []
|
||||
for x in video_json['videos']:
|
||||
if x['type'] == 'urn:video:main':
|
||||
if 'textTracks' in x:
|
||||
for sub in x['textTracks']:
|
||||
|
||||
isCC = False
|
||||
if 'ClosedCaptions' in sub["type"]:
|
||||
isCC = True
|
||||
isNormal = False
|
||||
if isCC:
|
||||
lang_id = ReplaceCodeLanguages(sub['language']) + '-sdh'
|
||||
trackType = 'SDH'
|
||||
else:
|
||||
lang_id = ReplaceCodeLanguages(sub['language'])
|
||||
isNormal = True
|
||||
trackType = 'NORMAL'
|
||||
isForced = False
|
||||
if sub["type"] == "Forced":
|
||||
isForced = True
|
||||
isNormal = False
|
||||
trackType = 'FORCED'
|
||||
lang_id = ReplaceCodeLanguages(sub['language']) + '-forced'
|
||||
subsDict = {
|
||||
'Language':lang_id,
|
||||
'URL':sub['url'],
|
||||
'isCC':isCC,
|
||||
'isForced':isForced,
|
||||
'isNormal':isNormal,
|
||||
'Type':trackType}
|
||||
subs_list.append(subsDict)
|
||||
|
||||
subs_list_new = []
|
||||
subs_for_list_new = []
|
||||
for subs in subs_list:
|
||||
isForced = subs['isForced']
|
||||
if isForced:
|
||||
subs_for_list_new.append(subs)
|
||||
else:
|
||||
subs_list_new.append(subs)
|
||||
|
||||
subs_for_list = []
|
||||
for subs in subs_for_list_new:
|
||||
lang = subs['Language']
|
||||
if args.forcedlang:
|
||||
if lang in args.forcedlang:
|
||||
subs_for_list.append(subs)
|
||||
else:
|
||||
subs_for_list.append(subs)
|
||||
|
||||
subs_list = []
|
||||
for subs in subs_list_new:
|
||||
lang = subs['Language']
|
||||
if args.sublang:
|
||||
if lang in args.sublang:
|
||||
subs_list.append(subs)
|
||||
else:
|
||||
subs_list.append(subs)
|
||||
|
||||
subs_list_new = []
|
||||
subs_list_new = subs_list + subs_for_list
|
||||
subs_list = subs_list_new
|
||||
|
||||
return subs_list
|
||||
|
||||
def get_metadata(content_id):
|
||||
meta_resp, mpd_url, length, subs_list, chapters = get_video_id(content_id)
|
||||
|
||||
if 'feature' in args.url_season:
|
||||
hbomaxType = "movie"
|
||||
releaseYear = meta_resp['releaseYear']
|
||||
seriesTitles = meta_resp['titles']['full']
|
||||
episodeTitle = meta_resp['titles']['full']
|
||||
|
||||
if 'numberInSeries' in meta_resp:
|
||||
hbomaxType = "show"
|
||||
numberInSeries = meta_resp['numberInSeries']
|
||||
seriesTitles = meta_resp['seriesTitles']['full']
|
||||
episodeTitle = meta_resp['titles']['full']
|
||||
|
||||
if 'numberInSeason' in meta_resp:
|
||||
hbomaxType = "show"
|
||||
seriesTitles = meta_resp['seriesTitles']['full']
|
||||
seasonNumber = meta_resp['seasonNumber']
|
||||
episodeNumber = meta_resp['numberInSeason']
|
||||
episodeTitle = meta_resp['titles']['full']
|
||||
|
||||
if hbomaxType=="movie":
|
||||
seriesName = f'{replace_words(episodeTitle)} ({releaseYear})'
|
||||
folderName = None
|
||||
|
||||
if hbomaxType=="show":
|
||||
try:
|
||||
seriesName = f'{replace_words(seriesTitles)} S{seasonNumber:02}E{episodeNumber:02} - {replace_words(episodeTitle)}'
|
||||
folderName = f'{replace_words(seriesTitles)} S{seasonNumber:02}'
|
||||
except UnboundLocalError:
|
||||
seriesName = f'{replace_words(seriesTitles)} E{numberInSeries:02} - {replace_words(episodeTitle)}'
|
||||
folderName = f'{replace_words(seriesTitles)}'
|
||||
|
||||
start_process(seriesName, folderName, subs_list, mpd_url, length, chapters, hbomaxType)
|
||||
|
||||
def start_process(seriesName, folderName, subs_list, mpd_url, length, chapters, hbomaxType):
|
||||
video_list, audio_list, pssh, base_url = mpd_parse(mpd_url)
|
||||
video_bandwidth = dict(video_list[(-1)])['Bandwidth']
|
||||
video_height = str(dict(video_list[(-1)])['Height'])
|
||||
video_width = str(dict(video_list[(-1)])['Width'])
|
||||
video_codec = str(dict(video_list[(-1)])['Codec'])
|
||||
if not args.license:
|
||||
if not args.novideo:
|
||||
print('\nVIDEO - Bitrate: ' + convert_size(int(video_bandwidth)) + ' - Profile: ' + video_codec.split('=')[0] + ' - Size: ' + get_size(length * float(video_bandwidth) * 0.125) + ' - Dimensions: ' + video_width + 'x' + video_height)
|
||||
print()
|
||||
if not args.noaudio:
|
||||
if audio_list != []:
|
||||
for x in audio_list:
|
||||
audio_bandwidth = x['Bandwidth']
|
||||
audio_representation_id = str(x['Codec'])
|
||||
audio_lang = x['Language']
|
||||
print('AUDIO - Bitrate: ' + convert_size(int(audio_bandwidth)) + ' - Profile: ' + audio_representation_id.split('=')[0] + ' - Size: ' + get_size(length * float(audio_bandwidth) * 0.125) + ' - Language: ' + audio_lang)
|
||||
print()
|
||||
if not args.nosubs:
|
||||
if subs_list != []:
|
||||
for z in subs_list:
|
||||
sub_lang = str(dict(z)['Language'])
|
||||
sub_profile = str(dict(z)['Type'])
|
||||
print('SUBTITLE - Profile: '+ sub_profile +' - Language: ' + sub_lang)
|
||||
print()
|
||||
print('Name: ' + seriesName + '\n')
|
||||
|
||||
if args.license:
|
||||
format_mpd = ""
|
||||
if 'hvc1' in video_codec:
|
||||
format_mpd = "HEVC KEYS"
|
||||
keys_all = get_keys(pssh)
|
||||
with open(keys_file, 'a', encoding='utf8') as (file):
|
||||
file.write(seriesName + format_mpd + '\n')
|
||||
for key in keys_all:
|
||||
with open(keys_file, 'a', encoding='utf8') as (file):
|
||||
file.write(key + '\n')
|
||||
print('\n' + seriesName + ' ' + format_mpd + '\n' + key)
|
||||
|
||||
else:
|
||||
'''
|
||||
if args.tag:
|
||||
from pywidevine.clients.dictionary import get_release_tag
|
||||
for x in audio_list:
|
||||
isDual = False
|
||||
audio_total = len(audio_list)
|
||||
if audio_total > 1:
|
||||
isDual = True
|
||||
seriesName = get_release_tag(seriesName, video_codec, video_height, x['Codec'], x['Channels'], x['Bandwidth'], 'HMAX', str(args.tag), isDual)
|
||||
'''
|
||||
|
||||
if hbomaxType == 'show':
|
||||
CurrentName = seriesName
|
||||
CurrentHeigh = str(video_height)
|
||||
if 'hvc1' in video_codec:
|
||||
VideoOutputName = folderdownloader + '\\' + str(folderName) + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p] [HEVC].mkv'
|
||||
if 'dvh1' in video_codec:
|
||||
VideoOutputName = folderdownloader + '\\' + str(folderName) + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p] [HDR].mkv'
|
||||
else:
|
||||
VideoOutputName = folderdownloader + '\\' + str(folderName) + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p].mkv'
|
||||
|
||||
else:
|
||||
CurrentName = seriesName
|
||||
CurrentHeigh = str(video_height)
|
||||
if 'hvc1' in video_codec:
|
||||
VideoOutputName = str(CurrentName) + ' [' + str(CurrentHeigh) + 'p] [HEVC].mkv'
|
||||
if 'dvh1' in video_codec:
|
||||
VideoOutputName = str(CurrentName) + ' [' + str(CurrentHeigh) + 'p] [HDR].mkv'
|
||||
else:
|
||||
VideoOutputName = str(CurrentName) + ' [' + str(CurrentHeigh) + 'p].mkv'
|
||||
|
||||
if not args.novideo or (not args.noaudio):
|
||||
print("Getting KEYS...")
|
||||
keys_all = get_keys(pssh)
|
||||
if not keys_all:
|
||||
print('License request failed, using keys from txt')
|
||||
keys_all = keys_file_txt
|
||||
if args.licenses_as_json:
|
||||
with open(keys_file, "a", encoding="utf8") as file:
|
||||
file.write(seriesName + "\n")
|
||||
for key in keys_all:
|
||||
with open(keys_file, "a", encoding="utf8") as file:
|
||||
file.write(key + "\n")
|
||||
print("Done!\n")
|
||||
|
||||
if not os.path.isfile(VideoOutputName):
|
||||
|
||||
print('Downloading video & audio')
|
||||
aria2c_input = ''
|
||||
if not args.novideo:
|
||||
if 'hvc1' in video_codec:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p] [HEVC].mp4'
|
||||
if 'dvh1' in video_codec:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p] [HDR].mp4'
|
||||
else:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p].mp4'
|
||||
if os.path.isfile(inputVideo) and not os.path.isfile(inputVideo + '.aria2'):
|
||||
print('\n' + inputVideo + '\nFile has already been successfully downloaded previously.\n')
|
||||
else:
|
||||
url = urllib.parse.urljoin(base_url, video_list[(-1)]['File_URL'])
|
||||
aria2c_input += f'{url}\n'
|
||||
aria2c_input += f'\tdir={folderdownloader}\n'
|
||||
aria2c_input += f'\tout={inputVideo}\n'
|
||||
|
||||
#downloadFile(base_url + video_list[(-1)]['File_URL'], inputVideo)
|
||||
|
||||
if not args.noaudio:
|
||||
for x in audio_list:
|
||||
langAbbrev = x['Language']
|
||||
inputAudio = seriesName + ' ' + '(' + langAbbrev + ')' + '.mp4'
|
||||
inputAudio_ac3 = seriesName + ' ' + '(' + langAbbrev + ')' + '.ac3'
|
||||
inputAudio_eac3 = seriesName + ' ' + '(' + langAbbrev + ')' + '.eac3'
|
||||
inputAudio_m4a = seriesName + ' ' + '(' + langAbbrev + ')' + '.m4a'
|
||||
if os.path.isfile(inputAudio) and not os.path.isfile(inputAudio + '.aria2') or os.path.isfile(inputAudio_ac3) or os.path.isfile(inputAudio_m4a) or os.path.isfile(inputAudio_eac3):
|
||||
print('\n' + inputAudio + '\nFile has already been successfully downloaded previously.\n')
|
||||
else:
|
||||
url = urllib.parse.urljoin(base_url, x['File_URL'])
|
||||
aria2c_input += f'{url}\n'
|
||||
aria2c_input += f'\tdir={folderdownloader}\n'
|
||||
aria2c_input += f'\tout={inputAudio}\n'
|
||||
|
||||
aria2c_infile = os.path.join(folderdownloader, 'aria2c_infile.txt')
|
||||
with open(aria2c_infile, 'w') as fd:
|
||||
fd.write(aria2c_input)
|
||||
aria2c_opts = [
|
||||
HMAXConfig.ARIA2C,
|
||||
'--allow-overwrite=true',
|
||||
'--download-result=hide',
|
||||
'--console-log-level=warn',
|
||||
'-x16', '-s16', '-j16',
|
||||
'-i', aria2c_infile]
|
||||
subprocess.run(aria2c_opts, check=True)
|
||||
|
||||
if not args.nosubs:
|
||||
if subs_list != []:
|
||||
for z in subs_list:
|
||||
langAbbrev = str(dict(z)['Language'])
|
||||
inputSubtitle = seriesName + " " + "(" + langAbbrev + ")"
|
||||
if os.path.isfile(inputSubtitle + ".xml") or os.path.isfile(inputSubtitle + ".srt"):
|
||||
print("\n" + inputSubtitle + "\nFile has already been successfully downloaded previously.\n")
|
||||
else:
|
||||
downloadFile2(str(dict(z)['URL']), inputSubtitle + ".xml")
|
||||
SubtitleEdit_process = subprocess.Popen([HMAXConfig.SUBTITLE_EDIT, "/convert", inputSubtitle + ".xml", "srt", "/fixcommonerrors", "/encoding:utf-8", "/RemoveLineBreaks"], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).wait()
|
||||
for f in glob.glob(inputSubtitle + ".xml"):
|
||||
os.remove(f)
|
||||
print("Done!\n")
|
||||
else:
|
||||
print ("\nNo subtitles available.")
|
||||
|
||||
if not args.nochpaters:
|
||||
if chapters != []:
|
||||
print('\nGenerating chapters file...')
|
||||
if os.path.isfile(seriesName + ' Chapters.txt'):
|
||||
print(seriesName + " Chapters.txt" + " has already been successfully downloaded previously.")
|
||||
else:
|
||||
counter = 1
|
||||
with open(seriesName + ' Chapters.txt', 'a', encoding='utf-8') as f:
|
||||
for x in chapters:
|
||||
f.write("CHAPTER" + f'{counter:02}' + "=" + x["TIME"] + "\n" + "CHAPTER" + f'{counter:02}' + "NAME=" + x["TEXT"] + "\n")
|
||||
counter = counter + 1
|
||||
print('Done!\n')
|
||||
else:
|
||||
print("\nNo chapters available.")
|
||||
|
||||
#~NOTE: aqui faz de tudo! Extrai as keys, faz decrypt e muxa os arquivos
|
||||
|
||||
CorrectDecryptVideo = False
|
||||
if not args.novideo:
|
||||
if 'hvc1' in video_codec:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p] [HEVC].mp4'
|
||||
if 'dvh1' in video_codec:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p] [HDR].mp4'
|
||||
else:
|
||||
inputVideo = seriesName + ' [' + str(CurrentHeigh) + 'p].mp4'
|
||||
if os.path.isfile(inputVideo):
|
||||
CorrectDecryptVideo = DecryptVideo(inputVideo=inputVideo, keys_video=keys_all)
|
||||
else:
|
||||
CorrectDecryptVideo = True
|
||||
|
||||
CorrectDecryptAudio = False
|
||||
if not args.noaudio:
|
||||
for x in audio_list:
|
||||
langAbbrev = x['Language']
|
||||
inputAudio = seriesName + ' ' + '(' + langAbbrev + ')' + '.mp4'
|
||||
if os.path.isfile(inputAudio):
|
||||
CorrectDecryptAudio = DecryptAudio(inputAudio=inputAudio, keys_audio=keys_all)
|
||||
else:
|
||||
CorrectDecryptAudio = True
|
||||
|
||||
if not args.nomux:
|
||||
if not args.novideo:
|
||||
if not args.noaudio:
|
||||
if CorrectDecryptVideo == True:
|
||||
if CorrectDecryptAudio == True:
|
||||
print('\nMuxing...')
|
||||
|
||||
if hbomaxType=="show":
|
||||
MKV_Muxer=Muxer(CurrentName=CurrentName,
|
||||
SeasonFolder=folderName,
|
||||
CurrentHeigh=CurrentHeigh,
|
||||
Type=hbomaxType,
|
||||
mkvmergeexe=HMAXConfig.MKVMERGE)
|
||||
|
||||
else:
|
||||
MKV_Muxer=Muxer(CurrentName=CurrentName,
|
||||
SeasonFolder=None,
|
||||
CurrentHeigh=CurrentHeigh,
|
||||
Type=hbomaxType,
|
||||
mkvmergeexe=HMAXConfig.MKVMERGE)
|
||||
|
||||
MKV_Muxer.mkvmerge_muxer(lang="English")
|
||||
|
||||
if args.tag:
|
||||
if 'hvc1' in video_codec:
|
||||
inputName = CurrentName + ' [' + CurrentHeigh + 'p] [HEVC].mkv'
|
||||
if 'dvh1' in video_codec:
|
||||
inputName = seriesName + ' [' + str(CurrentHeigh) + 'p] [HDR].mkv'
|
||||
else:
|
||||
inputName = CurrentName + ' [' + CurrentHeigh + 'p].mkv'
|
||||
|
||||
release_group(base_filename=inputName,
|
||||
default_filename=CurrentName,
|
||||
folder_name=folderName,
|
||||
type=hbomaxType,
|
||||
video_height=CurrentHeigh)
|
||||
|
||||
if not args.keep:
|
||||
for f in os.listdir():
|
||||
if re.fullmatch(re.escape(CurrentName) + r'.*\.(mp4|m4a|h264|h265|eac3|ac3|srt|txt|avs|lwi|mpd)', f):
|
||||
os.remove(f)
|
||||
print('Done!')
|
||||
else:
|
||||
print("File '" + str(VideoOutputName) + "' already exists.")
|
||||
|
||||
def title_parse(x):
    """Turn an HBO Max URL into the bare 'urn:hbo:<kind>:<id>' URN.

    Page URLs embed the real id as ':<id>:type:<kind>'; for those the URN
    is rebuilt from the id segment.  Returns None when the URL does not
    look like an HBO Max title URL.
    """
    # FIX: '(?:www\.)?' — the 'www.' prefix is optional; plain
    # https://hbomax.com/... URLs previously failed to match.  Also
    # tightened the sloppy 'urn?:hbo?:' (which accepted 'ur:hb:').
    m = re.match(r'https?://(play\.hbomax\.com/|(?:www\.)?hbomax\.com/)(?:page|feature|series|episode)/(urn:hbo:(?:feature|series|page|episode):.+?$)', x)
    if m:
        if 'type' in m[2] and 'series' in m[2]:
            m = 'urn:hbo:series:{}'.format(m[2].split(':')[-3])
        elif 'type' in m[2] and 'feature' in m[2]:
            m = 'urn:hbo:feature:{}'.format(m[2].split(':')[-3])
        elif 'type' in m[2] and 'episode' in m[2]:
            m = 'urn:hbo:episode:{}'.format(m[2].split(':')[-3])
        else:
            # already a bare URN — return it unchanged
            m = m[2]
    return m
|
||||
|
||||
from pywidevine.decrypt.wvdecryptcustom import WvDecrypt
|
||||
from pywidevine.cdm import cdm, deviceconfig
|
||||
|
||||
def get_keys(pssh):
    """Do a Widevine license request for *pssh* and return the content keys.

    FIX: the caller falls back to the keys saved in the txt file when this
    returns a falsy value ('License request failed, using keys from txt'),
    but any network/CDM error here used to raise and abort the run before
    that fallback could trigger — so failures now return [].
    """
    try:
        device = deviceconfig.device_android_generic
        wvdecrypt = WvDecrypt(init_data_b64=bytes(pssh.encode()), cert_data_b64=None, device=device)

        # POST the CDM challenge to the Widevine license server
        license_req = SESSION.post(url=license_wv, headers=refresh_token(), data=wvdecrypt.get_challenge()).content
        license_b64 = base64.b64encode(license_req)

        wvdecrypt.update_license(license_b64)
        status, keys = wvdecrypt.start_process()
    except Exception as e:
        print('License request failed: {}'.format(e))
        return []
    return keys
|
||||
|
||||
def release_group(base_filename, default_filename, folder_name, type, video_height):
    """Rename a muxed MKV to a scene-style release name.

    Probes the file with MediaInfo for the actual video/audio formats and
    renames it to '<Title>.<height>p.HMAX.WEB-DL.<audio>.<codec>-<tag>.mkv'.

    base_filename    -- current MKV file name
    default_filename -- human-readable title used to build the new name
    folder_name      -- season folder (used only when type == 'show')
    type             -- 'show' or anything else (movie); name kept for callers
    video_height     -- vertical resolution used in the new name
    """
    if type == 'show':
        video_mkv = os.path.join(folder_name, base_filename)
    else:
        video_mkv = base_filename

    mediainfo = mediainfo_(video_mkv)

    # FIX: video_format / codec_name / channels_number were only assigned
    # inside the track loops, raising NameError when the MKV lacked a
    # video or audio track; initialize them up front.
    video_format = ''
    for v in mediainfo['media']['track']:  # video track info
        if v['@type'] == 'Video':
            video_format = v['Format']

    video_codec = ''
    if video_format == "AVC":
        video_codec = 'H.264'
    elif video_format == "HEVC":
        video_codec = 'H.265'

    codec_name = ''
    channels_number = ''
    for m in mediainfo['media']['track']:  # audio track info
        if m['@type'] == 'Audio':
            codec_name = m['Format']
            channels_number = m['Channels']

    audio_codec = ''
    audio_channels = ''
    if codec_name == "AAC":
        audio_codec = 'AAC'
    elif codec_name == "AC-3":
        audio_codec = "DD"
    elif codec_name == "E-AC-3":
        audio_codec = "DDP"
    elif codec_name == "E-AC-3 JOC":
        audio_codec = "Atmos"

    if channels_number == "2":
        audio_channels = "2.0"
    elif channels_number == "6":
        audio_channels = "5.1"

    audio_ = audio_codec + audio_channels

    # sanitize the title: '&' -> '.and.', strip punctuation, dots for
    # spaces, collapse runs of dots
    default_filename = default_filename.replace('&', '.and.')
    default_filename = re.sub(r'[]!"#$%\'()*+,:;<=>?@\\^_`{|}~[-]', '', default_filename)
    default_filename = default_filename.replace(' ', '.')
    default_filename = re.sub(r'\.{2,}', '.', default_filename)

    output_name = '{}.{}p.HMAX.WEB-DL.{}.{}-{}'.format(default_filename, video_height, audio_, video_codec, args.tag)
    if type == 'show':
        outputName = os.path.join(folder_name, output_name + '.mkv')
    else:
        outputName = output_name + '.mkv'

    os.rename(video_mkv, outputName)
    print("{} -> {}".format(base_filename, output_name))
|
||||
|
||||
def DecryptAudio(inputAudio, keys_audio):
    """Decrypt a downloaded audio .mp4 and demux it to a raw stream file.

    Tries each 'kid:key' entry in *keys_audio* against the KID embedded in
    the file; on a match the file is mp4decrypt-ed, the original deleted,
    and the decrypted track remuxed to .m4a/.eac3/.ac3 per its codec.

    Returns True when the file was decrypted (or was never encrypted);
    falls through returning None when no key matches — callers treat any
    non-True value as failure.
    """
    # 'nothing' means mp4dump found no default_KID, i.e. the file is clear
    key_audio_id_original = getKeyId(inputAudio)
    outputAudioTemp = inputAudio.replace('.mp4', '_dec.mp4')
    if key_audio_id_original != 'nothing':
        for key in keys_audio:
            # keys are 'kid:key' hex strings; the first 32 chars are the KID
            key_id = key[0:32]
            if key_id == key_audio_id_original:
                print('\nDecrypting audio...')
                print('Using KEY: ' + key)
                wvdecrypt_process = subprocess.Popen([HMAXConfig.MP4DECRYPT, '--show-progress', '--key', key, inputAudio, outputAudioTemp])
                stdoutdata, stderrdata = wvdecrypt_process.communicate()
                wvdecrypt_process.wait()
                # short pause so the OS releases the file handle before delete
                # (presumably a Windows file-locking workaround — TODO confirm)
                time.sleep(0.05)
                os.remove(inputAudio)
                print('\nDemuxing audio...')
                mediainfo = mediainfo_(outputAudioTemp)
                for m in mediainfo['media']['track']:
                    if m['@type'] == 'Audio':
                        codec_name = m['Format']
                        try:
                            codec_tag_string = m['Format_Commercial_IfAny']
                        except Exception:
                            codec_tag_string = ''
                # map the codec to the raw-stream container extension
                ext = ''
                if codec_name == "AAC":
                    ext = '.m4a'
                elif codec_name == "E-AC-3":
                    ext = ".eac3"
                elif codec_name == "AC-3":
                    ext = ".ac3"
                outputAudio = outputAudioTemp.replace("_dec.mp4", ext)
                print("{} -> {}".format(outputAudioTemp, outputAudio))
                # stream-copy (no re-encode) into the final container
                ff = ffmpy.FFmpeg(executable=HMAXConfig.FFMPEG, inputs={outputAudioTemp: None}, outputs={outputAudio: '-c copy'}, global_options="-y -hide_banner -loglevel warning")
                ff.run()
                time.sleep (50.0/1000.0)
                os.remove(outputAudioTemp)
                print("Done!")
                return True

    elif key_audio_id_original == "nothing":
        # unencrypted track: nothing to do
        return True
|
||||
|
||||
def DecryptVideo(inputVideo, keys_video):
    """Decrypt a downloaded video .mp4 in place.

    Tries each 'kid:key' entry in *keys_video* against the KID embedded in
    the file; on a match the file is mp4decrypt-ed to a temp file, then
    stream-copied back over the original path.

    Returns True when the file was decrypted (or was never encrypted);
    falls through returning None when no key matches — callers treat any
    non-True value as failure.
    """
    # 'nothing' means mp4dump found no default_KID, i.e. the file is clear
    key_video_id_original = getKeyId(inputVideo)
    # FIX: removed the no-op 'inputVideo = inputVideo' self-assignment
    outputVideoTemp = inputVideo.replace('.mp4', '_dec.mp4')
    # the decrypted output overwrites the original path
    outputVideo = inputVideo
    if key_video_id_original != 'nothing':
        for key in keys_video:
            # keys are 'kid:key' hex strings; the first 32 chars are the KID
            key_id = key[0:32]
            if key_id == key_video_id_original:
                print('\nDecrypting video...')
                print('Using KEY: ' + key)
                wvdecrypt_process = subprocess.Popen([HMAXConfig.MP4DECRYPT, '--show-progress', '--key', key, inputVideo, outputVideoTemp])
                stdoutdata, stderrdata = wvdecrypt_process.communicate()
                wvdecrypt_process.wait()
                print('\nRemuxing video...')
                # stream-copy (no re-encode) back over the encrypted file
                ff = ffmpy.FFmpeg(executable=HMAXConfig.FFMPEG, inputs={outputVideoTemp: None}, outputs={outputVideo: '-c copy'}, global_options='-y -hide_banner -loglevel warning')
                ff.run()
                # short pause so the OS releases the file handle before delete
                time.sleep(0.05)
                os.remove(outputVideoTemp)
                print('Done!')
                return True

    elif key_video_id_original == 'nothing':
        # unencrypted track: nothing to do
        return True
|
||||
|
||||
# Resolve the requested URL into an HBO Max URN and dispatch to the
# season or single-title pipeline.
global content_id
content_id = title_parse(args.url_season)

if 'series' in args.url_season:
    if not args.season:
        args.season = 'all'
    get_season(content_id)
else:
    # BUG FIX: the original condition `elif 'feature' or 'episode':` was
    # always true (non-empty string literals), so it behaved exactly like
    # a plain else — make that explicit instead of keeping the misleading
    # expression.
    get_metadata(content_id=content_id)
|
|
@ -0,0 +1,84 @@
|
|||
-h, --help show this help message and exit
|
||||
-t TITLE, --title TITLE
|
||||
title id
|
||||
-wids [WATCH_IDS [WATCH_IDS ...]], --watch-ids [WATCH_IDS [WATCH_IDS ...]]
|
||||
episode / watch id, extract it straight from an
|
||||
episode/extra url
|
||||
--trailers download trailers for the specified title
|
||||
-o [OUTPUTFILE], --outputfile [OUTPUTFILE]
|
||||
output filename (no extension)
|
||||
-q {sd,sd-baseline,sd-main,720p,1080p,2160p,best}, --quality {sd,sd-baseline,sd-main,720p,1080p,2160p,best}
|
||||
video resolution
|
||||
-a AUDIOLANG, --audiolang AUDIOLANG
|
||||
audio language
|
||||
-c CODEC, --codec CODEC
|
||||
video type to download
|
||||
-k, --skip-cleanup skip cleanup step
|
||||
-dm, --dont-mux move unmuxed tracks instead of muxing
|
||||
-i, --info print track information and exit
|
||||
-d, --debug print debug statements
|
||||
-n {once,every,off}, --notify {once,every,off}
|
||||
choose notification mode
|
||||
-S, --subs-only download subtitles and exit
|
||||
-sl SUB_LANG, --sub-lang SUB_LANG
|
||||
subtitle languages to download
|
||||
-st {dfxp,vtt,none}, --sub-type {dfxp,vtt,none}
|
||||
subtitle format to download
|
||||
-sc {srt,ass,none}, --sub-convert {srt,ass,none}
|
||||
subtitle format to convert to
|
||||
-A, --audio-only download audio and exit
|
||||
-sA, --skip-audio do NOT download any audio tracks
|
||||
-ad, --audio-description
|
||||
download audio description tracks instead of regular
|
||||
ones
|
||||
-at, --all-tracks download and merge ALL tracks
|
||||
-s SEASON, --season SEASON
|
||||
lookup and download season from title id
|
||||
-E EPISODE, --episode EPISODE
|
||||
rip only specified episode(s) in season mode
|
||||
-e EPISODE_START, --episode_start EPISODE_START
|
||||
Recursively rip season number that provided viewable
|
||||
ID belongs to, starting at the episode provided
|
||||
--skip SKIP skip episodes in season mode
|
||||
--web-login
|
||||
--no-web-login use web login method instead of Android API login
|
||||
method
|
||||
--license do license request and print decryption keys only
|
||||
--licenses-as-json, -laj
|
||||
save the licenses as json instead
|
||||
--licenses-json-name LICENSES_JSON_NAME, -ljn LICENSES_JSON_NAME
|
||||
save the licenses-json to a specific name (will be
|
||||
"keys{name}.json")
|
||||
--licenses-json-overwrite, -ljo
|
||||
always overwrite keys.json if it exists (dangerous)
|
||||
--create-dl-info-json, -dij
|
||||
Create a JSON file with all metadata + track URLs
|
||||
(IIRC expire after 24 hours) + working VPN address (if
|
||||
any found in proxydata.json)
|
||||
--dl-info-json-name, -ijn
|
||||
Name of the json stored when using --create-dl-info-
|
||||
json/-dij
|
||||
-m, --store-meta store the metadata of the movies (json)
|
||||
--proxy PROXY proxy URL to use for both fetching metadata and
|
||||
downloading
|
||||
--proxy-meta PROXY_META
|
||||
proxy URL to use for fetching metadata
|
||||
--proxy-dl PROXY_DL proxy URL to use for downloading
|
||||
|
||||
-r us where "us" is the region code for the United States
|
||||
|
||||
|
||||
|
||||
[-h] [--url URL_SEASON] [--tqdm] [--nv] [--na] [--ns]
|
||||
[--all-season] [-e EPISODESTART] [-s SEASON] [--tag TAG]
|
||||
[-q CUSTOMQUALITY] [-o OUTPUT] [--keep] [--no-mux]
|
||||
[--only-2ch-audio] [--alang [AUDIOLANG [AUDIOLANG ...]]]
|
||||
[--slang [SUBLANG [SUBLANG ...]]]
|
||||
[--flang [FORCEDLANG [FORCEDLANG ...]]] [--hevc] [--uhd]
|
||||
[--only-keys] [--debug] [--aformat-51ch AFORMAT_51CH] [--nc]
|
||||
[-c {widevine,playready}] [--atmos] [--ad] [--hdr]
|
||||
[-r {la,us}] [--m3u8] [--file TXTPATH] [--tlang TITLELANG]
|
||||
[--scenario SCENARIO] [--proxy PROXY]
|
||||
[content]
|
||||
|
||||
|
|
@ -0,0 +1,788 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Module: Paramount Plus Downloader
|
||||
# Created on: 19-02-2021
|
||||
# Authors: JUNi
|
||||
# Version: 2.0
|
||||
|
||||
import urllib.parse
|
||||
import re, base64, requests, sys, os
|
||||
import subprocess, shutil
|
||||
import xmltodict, isodate
|
||||
import json, ffmpy
|
||||
import http, html, time
|
||||
|
||||
from unidecode import unidecode
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from titlecase import titlecase
|
||||
from pymediainfo import MediaInfo
|
||||
|
||||
import pywidevine.clients.paramountplus.config as pmnp_cfg
|
||||
from pywidevine.clients.proxy_config import ProxyConfig
|
||||
from pywidevine.muxer.muxer import Muxer
|
||||
|
||||
from pywidevine.clients.paramountplus.downloader import WvDownloader
|
||||
from pywidevine.clients.paramountplus.config import WvDownloaderConfig
|
||||
|
||||
|
||||
currentFile = 'paramountplus'
|
||||
realPath = os.path.realpath(currentFile)
|
||||
dirPath = os.path.dirname(realPath)
|
||||
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36'
|
||||
SESSION = requests.Session()
|
||||
|
||||
def main(args):
|
||||
global _id

# Build per-purpose proxy maps; the literal string 'none' explicitly
# disables proxying for metadata requests.
proxies = {}
proxy_meta = args.proxy
if proxy_meta == 'none':
    proxies['meta'] = {'http': None, 'https': None}
elif proxy_meta:
    proxies['meta'] = {'http': proxy_meta, 'https': proxy_meta}
SESSION.proxies = proxies.get('meta')
proxy_cfg = ProxyConfig(proxies)

# Make sure the KEYS cache folder exists, then load any previously saved
# keys (one 'kid:key' per line, with a header comment line).
# BUG FIX: keys_file used to be assigned only when the KEYS folder already
# existed, so a first run (folder just created) crashed later with
# NameError — assign it unconditionally.
if not os.path.exists(dirPath + '/KEYS'):
    os.makedirs(dirPath + '/KEYS')
keys_file = dirPath + '/KEYS/PARAMOUNTPLUS.txt'
try:
    keys_file_pmnp = open(keys_file, 'r')
    keys_file_txt = keys_file_pmnp.readlines()
except Exception:
    # file missing: create it with the header line, then read it back
    with open(keys_file, 'a', encoding='utf8') as (file):
        file.write('##### One KEY per line. #####\n')
    keys_file_pmnp = open(keys_file, 'r', encoding='utf8')
    keys_file_txt = keys_file_pmnp.readlines()
|
||||
|
||||
def alphanumericSort(l):
    """Sort strings naturally, so embedded numbers compare numerically
    (e.g. 'e2' sorts before 'e10')."""
    def natural_key(text):
        # split into digit / non-digit runs; digits compare as ints
        return [int(tok) if tok.isdigit() else tok
                for tok in re.split('([0-9]+)', text)]

    return sorted(l, key=natural_key)
|
||||
|
||||
def convert_size(size_bytes):
    """Render a bits-per-second bandwidth value as a whole-kbps string."""
    if size_bytes == 0:
        return '0bps'
    return '%ikbps' % round(size_bytes / 1000, 0)
|
||||
|
||||
def get_size(size):
    """Render a byte count as a human-readable size (1024 steps: B..TB)."""
    units = {0: '', 1: 'K', 2: 'M', 3: 'G', 4: 'T'}
    order = 0
    # divide down until the value fits inside one unit step
    while size > 1024:
        size /= 1024
        order += 1
    return str(round(size, 2)) + units[order] + 'B'
|
||||
|
||||
def getKeyId(name):
    """Extract the default_KID (hex, no spaces/brackets) from an MP4 via
    mp4dump; return the string 'nothing' when the file carries no KID
    (i.e. it is not encrypted).
    """
    mp4dump = subprocess.Popen([pmnp_cfg.MP4DUMP, name], stdout=(subprocess.PIPE))
    mp4dump = str(mp4dump.stdout.read())
    # NOTE(review): A is -1 when 'default_KID' is absent; the slice then
    # yields text that fails the check below and falls through to 'nothing'.
    A = find_str(mp4dump, 'default_KID')
    # FIX: removed the dead `KEY_ID_ORI = ''` store that was immediately
    # overwritten on the next line.
    KEY_ID_ORI = mp4dump[A:A + 63].replace('default_KID = ', '').replace('[', '').replace(']', '').replace(' ', '')
    if KEY_ID_ORI == '' or KEY_ID_ORI == "'":
        KEY_ID_ORI = 'nothing'
    return KEY_ID_ORI
|
||||
|
||||
def find_str(s, char):
    """Return the index of the first occurrence of *char* in *s*, or -1.

    FIX: the original hand-rolled a character-by-character scan that
    duplicates `str.find` exactly (first-occurrence index or -1) and
    crashed with IndexError on an empty needle; delegate to the builtin.
    """
    return s.find(char)
|
||||
|
||||
def mediainfo_(file):
    """Run MediaInfo with JSON output on *file* and return the parsed dict."""
    # NOTE(review): `MediaInfo_exe` is not defined anywhere in this file —
    # presumably a module-level path constant like the other tool paths
    # (cf. pmnp_cfg.MP4DUMP); verify it exists at runtime.
    mediainfo_output = subprocess.Popen([MediaInfo_exe, '--Output=JSON', '-f', file], stdout=(subprocess.PIPE))
    mediainfo_json = json.load(mediainfo_output.stdout)
    return mediainfo_json
|
||||
|
||||
def ReplaceDontLikeWord(X):
    """Strip/replace filesystem-hostile characters from a title and return
    it Title Cased.

    Ordering matters: straight apostrophes are removed first, and curly
    apostrophes are converted to straight ones afterwards, so curly quotes
    survive as straight quotes.
    """
    # FIX: the original wrapped this chain in try/except whose handler ran
    # the exact same replacement chain; str.replace cannot raise here, so
    # the duplicate branch was dead code and has been removed.
    X = X.replace(" : ", " - ").replace(": ", " - ").replace(":", " - ") \
         .replace("&", "and").replace("+", "").replace(";", "").replace("ó", "o") \
         .replace("[", "").replace("'", "").replace("]", "").replace("/", "").replace("//", "") \
         .replace("’", "'").replace("*", "x").replace("<", "").replace(">", "").replace("|", "") \
         .replace("~", "").replace("#", "").replace("%", "").replace("{", "").replace("}", "").replace(",", "") \
         .replace("?", "").replace("¿", "")

    return titlecase(X)
|
||||
|
||||
def replace_code_lang(X):
    """Normalize a language/audio label to this script's canonical codes
    (e.g. 'ES-MX' -> 'es-la', 'Dolby Digital' / 'DD+' -> 'en')."""
    X = X.lower()
    # FIX: removed the dead `.replace('pt-BR', 'pt-br')` — after .lower()
    # the string can never contain 'pt-BR'; the lowercase form is already
    # the desired code.
    X = X.replace('es-mx', 'es-la').replace('dolby digital', 'en').replace('dd+', 'en')
    return X
|
||||
|
||||
def get_cookies(file_path):
    """Load a Netscape/Mozilla cookies.txt export and flatten it into a
    'name=value;name=value' Cookie header string.

    Prints instructions and exits the program when the file is missing or
    unparseable.
    """
    try:
        cj = http.cookiejar.MozillaCookieJar(file_path)
        cj.load()
    except Exception:
        print('\nCookies not found! Please dump the cookies with the Chrome extension https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg and place the generated file in ' + file_path)
        print('\nWarning, do not click on "download all cookies", you have to click on "click here".\n')
        sys.exit(0)

    pairs = []
    for cookie in cj:
        # values may come URL-quoted and/or HTML-escaped from the export
        cookie.value = urllib.parse.unquote(html.unescape(cookie.value))
        pairs.append(cookie.name + '=' + cookie.value)

    # FIX: the original stripped the trailing ';' via list()/del[-1]/join,
    # which raised IndexError on an empty cookie jar; ';'.join produces the
    # same string and is safe for zero cookies.
    return ';'.join(pairs)
|
||||
|
||||
# Load the exported browser cookies and build the default headers sent with
# every paramountplus.com request.
cookies_file = 'cookies_pmnp.txt'
cookies = get_cookies(dirPath + '/cookies/' + cookies_file)
pmnp_headers = {
    'Accept':'application/json, text/plain, */*',
    'Access-Control-Allow-Origin':'*',
    # full flattened cookie string; carries the login session
    'cookie':cookies,
    'User-Agent':USER_AGENT
    }
|
||||
|
||||
def mpd_parsing(mpd_url):
    """Download and parse the DASH MPD manifest.

    Returns (base_url, duration_seconds, video_list, audio_list, subs_list,
    pssh, mpd_dict).  video_list is sorted ascending by bandwidth (best
    last) and capped at args.customquality; audio_list keeps one (highest
    bandwidth) rendition per language, optionally filtered by
    args.audiolang and/or stereo-only.  Subtitles are not carried by this
    MPD, so the subs slot is always [].
    """
    # segment URLs inside the MPD are relative to the manifest location
    base_url = mpd_url.split('stream.mpd')[0]
    r = SESSION.get(url=mpd_url)
    r.raise_for_status()
    # round-trip through json to turn xmltodict's OrderedDicts into plain dicts
    xml = xmltodict.parse(r.text)
    mpdf = json.loads(json.dumps(xml))
    length = isodate.parse_duration(mpdf['MPD']['@mediaPresentationDuration']).total_seconds()
    tracks = mpdf['MPD']['Period']['AdaptationSet']

    def get_pssh(track):
        # the Widevine PSSH lives under the ContentProtection node with
        # this well-known scheme UUID
        pssh = ''
        for t in track["ContentProtection"]:
            if t['@schemeIdUri'].lower() == 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
                pssh = t["cenc:pssh"]
        return pssh

    def force_instance(x):
        # xmltodict yields a dict for a single Representation, a list otherwise
        if isinstance(x['Representation'], list):
            X = x['Representation']
        else:
            X = [x['Representation']]
        return X

    video_list = []
    for video_tracks in tracks:
        if video_tracks['@contentType'] == 'video':
            pssh = get_pssh(video_tracks)
            for x in force_instance(video_tracks):
                try:
                    codecs = x['@codecs']
                except (KeyError, TypeError):
                    # @codecs may live on the AdaptationSet instead of the
                    # Representation
                    codecs = video_tracks['@codecs']
                video_dict = {
                    'Height':x['@height'],
                    'Width':x['@width'],
                    'Bandwidth':x['@bandwidth'],
                    'ID':x['@id'],
                    'TID':video_tracks['@id'],
                    'Codec':codecs}
                video_list.append(video_dict)

    # ascending bandwidth: the last entry is the best quality
    video_list = sorted(video_list, key=(lambda k: int(k['Bandwidth'])))

    # drop renditions above the user's requested resolution cap
    while args.customquality != [] and int(video_list[(-1)]['Height']) > int(args.customquality[0]):
        video_list.pop(-1)

    audio_list = []
    for audio_tracks in tracks:
        if audio_tracks['@contentType'] == 'audio':
            for x in force_instance(audio_tracks):
                try:
                    codecs = x['@codecs']
                except (KeyError, TypeError):
                    codecs = audio_tracks['@codecs']
                audio_dict = {
                    'Bandwidth':x['@bandwidth'],
                    'ID':x['@id'],
                    'TID':audio_tracks['@id'],
                    'Language':replace_code_lang(audio_tracks['@lang']),
                    'Codec':codecs}
                audio_list.append(audio_dict)

    audio_list = sorted(audio_list, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))), reverse=True)
    if args.only_2ch_audio:
        # drop tracks whose codec id contains '-3' (AC-3 / E-AC-3),
        # keeping only stereo renditions
        c = 0
        while c != len(audio_list):
            if '-3' in audio_list[c]['Codec'].split('=')[0]:
                audio_list.remove(audio_list[c])
            else:
                c += 1

    BitrateList = []
    AudioLanguageList = []
    for x in audio_list:
        BitrateList.append(x['Bandwidth'])
        AudioLanguageList.append(x['Language'])

    BitrateList = alphanumericSort(list(set(BitrateList)))
    AudioLanguageList = alphanumericSort(list(set(AudioLanguageList)))
    # keep only the first track per language; the list is sorted by
    # descending bandwidth, so that is the best rendition
    audioList_new = []
    audio_Dict_new = {}
    for y in AudioLanguageList:
        counter = 0
        for x in audio_list:
            if x['Language'] == y and counter == 0:
                audio_Dict_new = {
                    'Language':x['Language'],
                    'Bandwidth':x['Bandwidth'],
                    'Codec': x['Codec'],
                    'TID':x['TID'],
                    'ID':x['ID']}
                audioList_new.append(audio_Dict_new)
                counter = counter + 1

    audioList = audioList_new
    audio_list = sorted(audioList, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))))

    # optional user language filter
    audioList_new = []
    if args.audiolang:
        for x in audio_list:
            langAbbrev = x['Language']
            if langAbbrev in list(args.audiolang):
                audioList_new.append(x)
        audio_list = audioList_new

    # NOTE(review): pssh is only bound when a video AdaptationSet exists;
    # a video-less MPD would raise NameError here — assumed never to occur.
    return base_url, length, video_list, audio_list, [], pssh, mpdf
|
||||
|
||||
def get_episodes(ep_str, num_eps):
    """Expand a '1,3-5' style selection string into a list of episode
    numbers; an open range like '7-' runs to *num_eps*."""
    selected = []
    for part in ep_str.split(','):
        if '-' in part:
            lo, hi = part.split('-')
            # an empty upper bound means "through the last episode"
            selected.extend(range(int(lo), int(hi or num_eps) + 1))
        else:
            selected.append(int(part))
    return selected
|
||||
|
||||
# Derive the show/movie slug from the URL.  For /video/ URLs, also capture
# the specific content id (regex group 4) so a single episode can be
# selected later.
_id = args.url_season.split('/')[-2]
if '/video/' in args.url_season:
    content_regex = r'(\/shows\/)([\w-]+)(\/video\/)([\w-]+)'
    url_match = re.search(content_regex, args.url_season)
    # NOTE(review): assumes the regex always matches a /video/ URL;
    # url_match would be None (TypeError below) otherwise — verify.
    _id = url_match[2]
|
||||
|
||||
def get_content_info():
    """Resolve the requested show/movie URL into per-title job dicts and
    run start_process() for each one.

    Shows: enumerates the selected seasons via the site's XHR episode
    endpoint.  Movies: scrapes the player.metaData JSON out of the page.
    """
    if 'shows' in args.url_season:
        # the season-less request is only used to discover which seasons exist
        pmnp_season_url = 'https://www.paramountplus.com/shows/{}/xhr/episodes/page/0/size/100/xs/0/season/{}/'.format(_id, '')
        season_req = requests.get(url=pmnp_season_url, headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))

        if not args.season:
            args.season = 'all'

        # accepted forms: 'all', 'N', 'N,M', 'N-M'
        seasons = []
        if args.season:
            if args.season == 'all':
                seasons = 'all'
            elif ',' in args.season:
                seasons = [int(x) for x in args.season.split(',')]
            elif '-' in args.season:
                (start, end) = args.season.split('-')
                seasons = list(range(int(start), int(end) + 1))
            else:
                seasons = [int(args.season)]

        if seasons == 'all':
            seasons_list = [x['season_number'] for x in season_req.json()['result']['data']]
            seasons = sorted(set(seasons_list))

        for season_num in seasons:
            pmnp_season_url = 'https://www.paramountplus.com/shows/{}/xhr/episodes/page/0/size/500/xs/0/season/{}/'.format(_id, season_num)
            season_req = requests.get(url=pmnp_season_url, headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
            if season_req.json()['result']['total'] < 1:
                print('This season doesnt exist!')
                exit()

            # BUG FIX: the episode list was re-created inside the episode
            # loop, so only the season's last episode ever survived to the
            # download stage; accumulate across the whole season instead.
            episodes_list = []
            for num, ep in enumerate(season_req.json()['result']['data'], start=1):
                episodeNumber = ep['episode_number']
                seasonNumber = ep['season_number']
                seriesTitle = ReplaceDontLikeWord(ep['series_title'])
                episodeTitle = ReplaceDontLikeWord(ep['label'])
                seriesName = f'{seriesTitle} S{seasonNumber:0>2}E{episodeNumber:0>2} - {episodeTitle}'
                folderName = f'{seriesTitle} S{seasonNumber:0>2}'
                raw_url = urllib.parse.urljoin('https://www.paramountplus.com', ep['metaData']['contentUrl'])

                episodes_list.append({
                    'id': ep['content_id'],
                    'raw_url': raw_url,
                    'pid': ep['metaData']['pid'],
                    'seriesName': seriesName,
                    'folderName': folderName,
                    'episodeNumber': num,
                    'seasonNumber': seasonNumber,
                    'pmnpType': 'show'})

            if args.episodeStart:
                eps = get_episodes(args.episodeStart, len(episodes_list))
                episodes_list = [x for x in episodes_list if x['episodeNumber'] in eps]

            if 'video' in args.url_season:
                # keep only the episode whose content id appears in the URL
                episodes_list = [x for x in episodes_list if x['id'] in url_match.group(4)]

            for content_json in episodes_list:
                start_process(content_json)

    if 'movies' in args.url_season:
        # retry until the page loads (the site occasionally rate-limits)
        while 1:
            resp = requests.get(url=args.url_season + '/', headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
            if resp.ok:
                break

        html_data = resp.text.replace('\r\n', '').replace('\n', '').replace('\r', '').replace('\t', '').replace(' ', '')
        # FIX: the inline '(?i)' flag must lead the pattern — a mid-pattern
        # position raises re.error on Python 3.11+.
        html_data_list = re.split('(?i)(</div>)', html_data)
        json_web = []
        for div in html_data_list:
            if 'player.paramsVO.adCallParams' in div:
                print()
                rg = re.compile('(player.metaData = )(.*)(;player.tms_program_id)')
                m = rg.search(div)
                if m:
                    json_web = json.loads(m.group(2))

        episodes_list = []
        year_regex = r'(\d{4})'
        movieTitle = ReplaceDontLikeWord(json_web['seriesTitle'])
        # the release year lives in 'airdate' or (older titles) 'airdate_tv'
        try:
            r = re.search(year_regex, json_web['airdate'])
        except KeyError:
            r = re.search(year_regex, json_web['airdate_tv'])
        seriesName = f'{movieTitle} ({r.group(0)})'

        episodes_list.append({
            'id': json_web['contentId'],
            'raw_url': str(args.url_season),
            'pid': json_web['pid'],
            'seriesName': seriesName,
            'folderName': None,
            'episodeNumber': 1,
            'seasonNumber': 1,
            'pmnpType': 'movie'})

        for content_json in episodes_list:
            start_process(content_json)
|
||||
|
||||
def get_license(id_json):
    """Scrape the title page for the player.drm config block and return
    (widevine_license_url, authorization_header_value)."""
    # retry until the page loads (the site occasionally rate-limits)
    while 1:
        resp = requests.get(url=id_json['raw_url'], headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
        if resp.ok:
            break

    html_data = resp.text.replace('\r\n', '').replace('\n', '').replace('\r', '').replace('\t', '').replace(' ', '')
    # FIX: the inline '(?i)' flag must lead the pattern — a mid-pattern
    # position raises re.error on Python 3.11+.
    html_data_list = re.split('(?i)(</div>)', html_data)
    json_web = []
    for div in html_data_list:
        if '(!window.CBS.Registry.drmPromise) {' in div:
            rg = re.compile('(player.drm = )(.*)(;}player.enableCP)')
            m = rg.search(div)
            if m:
                json_web = json.loads(m.group(2))

    # NOTE(review): if the drm block was never found, json_web is still a
    # list and the lookups below raise TypeError — assumed not to happen
    # on a valid title page.
    lic_url = json_web['widevine']['url']
    header_auth = json_web['widevine']['header']['Authorization']
    if not lic_url:
        print('Too many requests...')
    return lic_url, header_auth
|
||||
|
||||
# Resolve the download root folder as a Windows-style backslash path.
global folderdownloader
if args.output:
    if not os.path.exists(args.output):
        os.makedirs(args.output)
    os.chdir(args.output)
    if ":" in str(args.output):
        # a drive letter is present, so treat it as an absolute path
        folderdownloader = str(args.output).replace('/','\\').replace('.\\','\\')
    else:
        # relative output folder: anchor it under the script directory
        folderdownloader = dirPath + str(args.output).replace('/','\\').replace('.\\','\\')
else:
    folderdownloader = dirPath.replace('/','\\').replace('.\\','\\')
|
||||
|
||||
def get_manifest(id_json):
    """Fetch the thePlatform SMIL manifest for this title's pid and return
    {'mpd_url': ..., 'license': ..., 'lic_header': ...}."""
    api_manifest = 'https://link.theplatform.com/s/dJ5BDC/{}?format=SMIL&manifest=m3u&Tracking=true&mbr=true'.format(id_json['pid'])
    r = requests.get(url=api_manifest, headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
    # round-trip through json to turn xmltodict's OrderedDicts into plain dicts
    xmls = xmltodict.parse(r.text)
    smil = json.loads(json.dumps(xmls))
    videoSrc = []
    # the <switch> node is a list for multi-rendition manifests and a
    # plain dict otherwise; the except branch handles the dict case
    try:
        for x in smil['smil']['body']['seq']['switch']:
            videoSrc = x['video']['@src']
    except Exception:
        videoSrc = smil['smil']['body']['seq']['switch']['video']['@src']
    lic_url, header_auth = get_license(id_json)
    return {'mpd_url': videoSrc, 'license': lic_url, 'lic_header': header_auth}
|
||||
|
||||
def start_process(content_info):
    """Download, decrypt and mux one title described by *content_info*.

    Flow: resolve manifest -> parse MPD -> print track summary -> fetch keys
    (cached to the KEYS txt) -> download video/audio -> decrypt -> mux to MKV
    -> optional scene-style rename -> clean up intermediates.
    """
    drm_info = get_manifest(content_info)
    base_url, length, video_list, audio_list, subs_list, pssh, xml = mpd_parsing(drm_info['mpd_url'])
    # video_list is sorted ascending by bandwidth, so [-1] is the best quality left.
    video_bandwidth = dict(video_list[(-1)])['Bandwidth']
    video_height = str(dict(video_list[(-1)])['Height'])
    video_width = str(dict(video_list[(-1)])['Width'])
    video_codec = str(dict(video_list[(-1)])['Codec'])
    video_format_id = str(dict(video_list[(-1)])['ID'])
    video_track_id = str(dict(video_list[(-1)])['TID'])
    if not args.onlykeys:
        # Print a summary of the selected tracks.
        if not args.novideo:
            print('\nVIDEO - Bitrate: ' + convert_size(int(video_bandwidth)) + ' - Profile: ' + video_codec.split('=')[0] + ' - Size: ' + get_size(length * float(video_bandwidth) * 0.125) + ' - Dimensions: ' + video_width + 'x' + video_height)
            print()
        if not args.noaudio:
            if audio_list != []:
                for x in audio_list:
                    audio_bandwidth = x['Bandwidth']
                    audio_representation_id = str(x['Codec'])
                    audio_lang = x['Language']
                    print('AUDIO - Bitrate: ' + convert_size(int(audio_bandwidth)) + ' - Profile: ' + audio_representation_id.split('=')[0] + ' - Size: ' + get_size(length * float(audio_bandwidth) * 0.125) + ' - Language: ' + audio_lang)
                print()
        if not args.nosubs:
            if subs_list != []:
                for z in subs_list:
                    sub_lang = z['Language']
                    print('SUBTITLE - Profile: Normal - Language: ' + sub_lang)
                print()
    print('Name: ' + content_info['seriesName'])

    # Final MKV path: shows live in a season folder.
    if content_info['pmnpType'] == 'show':
        CurrentName = content_info['seriesName']
        CurrentHeigh = str(video_height)
        VideoOutputName = folderdownloader + '\\' + str(content_info['folderName']) + '\\' + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p].mkv'
    else:
        CurrentName = content_info['seriesName']
        CurrentHeigh = str(video_height)
        # NOTE(review): for movies the title becomes the folder name and the
        # file itself is just ' [<height>p].mkv' — confirm this layout is intended.
        VideoOutputName = folderdownloader + '\\' + str(CurrentName) + '\\' + ' [' + str(CurrentHeigh) + 'p].mkv'

    if args.onlykeys:
        # Keys-only mode: fetch, append to the KEYS txt, print, and stop.
        keys_all = get_keys(drm_info, pssh)
        with open(keys_file, 'a', encoding='utf8') as (file):
            file.write(CurrentName + '\n')
        print('\n' + CurrentName)
        for key in keys_all:
            with open(keys_file, 'a', encoding='utf8') as (file):
                file.write(key + '\n')
            print(key)
    else:
        if not args.novideo or (not args.noaudio):
            print("\nGetting KEYS...")
            try:
                keys_all = get_keys(drm_info, pssh)
            except KeyError:
                # License scrape failed — fall back to keys cached in the txt.
                print('License request failed, using keys from txt')
                keys_all = keys_file_txt
            else:
                # Cache the fresh keys under the content name.
                with open(keys_file, "a", encoding="utf8") as file:
                    file.write(CurrentName + "\n")
                for key in keys_all:
                    with open(keys_file, "a", encoding="utf8") as file:
                        file.write(key + "\n")
                print("Done!")

        if not os.path.isfile(VideoOutputName):
            # --- download stage ---
            if not args.novideo:
                inputVideo = CurrentName + ' [' + str(CurrentHeigh) + 'p].mp4'
                if os.path.isfile(inputVideo):
                    print('\n' + inputVideo + '\nFile has already been successfully downloaded previously.\n')
                else:
                    wvdl_cfg = WvDownloaderConfig(xml, base_url, inputVideo, video_track_id, video_format_id)
                    wvdownloader = WvDownloader(wvdl_cfg)
                    wvdownloader.run()
            if not args.noaudio:
                for x in audio_list:
                    langAbbrev = x['Language']
                    format_id = x['ID']
                    inputAudio = CurrentName + ' ' + '(' + langAbbrev + ').mp4'
                    inputAudio_demuxed = CurrentName + ' ' + '(' + langAbbrev + ')' + '.m4a'
                    if os.path.isfile(inputAudio) or os.path.isfile(inputAudio_demuxed):
                        print('\n' + inputAudio + '\nFile has already been successfully downloaded previously.\n')
                    else:
                        wvdl_cfg = WvDownloaderConfig(xml, base_url, inputAudio, x['TID'], x['ID'])
                        wvdownloader = WvDownloader(wvdl_cfg)
                        wvdownloader.run()

            # --- decrypt stage ---
            CorrectDecryptVideo = False
            if not args.novideo:
                inputVideo = CurrentName + ' [' + str(CurrentHeigh) + 'p].mp4'
                if os.path.isfile(inputVideo):
                    CorrectDecryptVideo = DecryptVideo(inputVideo=inputVideo, keys_video=keys_all)
                else:
                    # Already decrypted/remuxed on a previous run.
                    CorrectDecryptVideo = True

            CorrectDecryptAudio = False
            if not args.noaudio:
                for x in audio_list:
                    langAbbrev = x['Language']
                    inputAudio = CurrentName + ' ' + '(' + langAbbrev + ')' + '.mp4'
                    if os.path.isfile(inputAudio):
                        CorrectDecryptAudio = DecryptAudio(inputAudio=inputAudio, keys_audio=keys_all)
                    else:
                        CorrectDecryptAudio = True

            # --- mux stage ---
            if not args.nomux:
                if not args.novideo:
                    if not args.noaudio:
                        if CorrectDecryptVideo == True:
                            if CorrectDecryptAudio == True:
                                print('\nMuxing...')
                                pmnpType = content_info['pmnpType']
                                folderName = content_info['folderName']
                                if pmnpType=="show":
                                    MKV_Muxer=Muxer(CurrentName=CurrentName,
                                                    SeasonFolder=folderName,
                                                    CurrentHeigh=CurrentHeigh,
                                                    Type=pmnpType,
                                                    mkvmergeexe=pmnp_cfg.MKVMERGE)
                                else:
                                    MKV_Muxer=Muxer(CurrentName=CurrentName,
                                                    SeasonFolder=None,
                                                    CurrentHeigh=CurrentHeigh,
                                                    Type=pmnpType,
                                                    mkvmergeexe=pmnp_cfg.MKVMERGE)
                                MKV_Muxer.mkvmerge_muxer(lang="English")
                                if args.tag:
                                    # Rename to '<Title>.<h>p.PMNP.WEB-DL...-<tag>.mkv'.
                                    inputName = CurrentName + ' [' + CurrentHeigh + 'p].mkv'
                                    release_group(base_filename=inputName,
                                                  default_filename=CurrentName,
                                                  folder_name=folderName,
                                                  type=pmnpType,
                                                  video_height=CurrentHeigh)
                                if not args.keep:
                                    # Delete this title's intermediate files.
                                    for f in os.listdir():
                                        if re.fullmatch(re.escape(CurrentName) + r'.*\.(mp4|m4a|h264|h265|eac3|srt|txt|avs|lwi|mpd)', f):
                                            os.remove(f)
                                print("Done!")
        else:
            print("\nFile '" + str(VideoOutputName) + "' already exists.")
|
||||
|
||||
def release_group(base_filename, default_filename, folder_name, type, video_height):
    """Rename a muxed MKV to a scene-style release name.

    base_filename: current '<Title> [<h>p].mkv' name; default_filename: the
    clean title; type: 'show' (file lives inside folder_name) or anything
    else (file in cwd). Produces
    '<Title>.<h>p.PMNP.WEB-DL.<audio><ch>.<codec>-<args.tag>.mkv'.
    """
    if type=='show':
        video_mkv = os.path.join(folder_name, base_filename)
    else:
        video_mkv = base_filename

    mediainfo = mediainfo_(video_mkv)
    # Video codec of the muxed file (if several Video tracks, the last wins).
    # NOTE(review): if MediaInfo reports no Video/Audio track, video_format /
    # codec_name below are never assigned and this raises NameError.
    for v in mediainfo['media']['track']:  # video mediainfo
        if v['@type'] == 'Video':
            video_format = v['Format']

    video_codec = ''
    if video_format == "AVC":
        video_codec = 'H.264'
    elif video_format == "HEVC":
        video_codec = 'H.265'

    for m in mediainfo['media']['track']:  # audio mediainfo
        if m['@type'] == 'Audio':
            codec_name = m['Format']
            channels_number = m['Channels']

    # Map codec/channel count to release-name tokens; unknown values stay ''.
    audio_codec = ''
    audio_channels = ''
    if codec_name == "AAC":
        audio_codec = 'AAC'
    elif codec_name == "AC-3":
        audio_codec = "DD"
    elif codec_name == "E-AC-3":
        audio_codec = "DDP"
    elif codec_name == "E-AC-3 JOC":
        audio_codec = "ATMOS"

    if channels_number == "2":
        audio_channels = "2.0"
    elif channels_number == "6":
        audio_channels = "5.1"

    audio_ = audio_codec + audio_channels

    # Build the release filename: '&' -> '.and.', strip punctuation,
    # spaces -> dots, collapse dot runs, transliterate to plain ASCII.
    default_filename = default_filename.replace('&', '.and.')
    default_filename = re.sub(r'[]!"#$%\'()*+,:;<=>?@\\^_`{|}~[-]', '', default_filename)
    default_filename = default_filename.replace(' ', '.')
    default_filename = re.sub(r'\.{2,}', '.', default_filename)
    default_filename = unidecode(default_filename)

    output_name = '{}.{}p.PMNP.WEB-DL.{}.{}-{}'.format(default_filename, video_height, audio_, video_codec, args.tag)
    if type=='show':
        outputName = os.path.join(folder_name, output_name + '.mkv')
    else:
        outputName = output_name + '.mkv'

    os.rename(video_mkv, outputName)
    print("{} -> {}".format(base_filename, output_name))
|
||||
|
||||
from pywidevine.decrypt.wvdecryptcustom import WvDecrypt
|
||||
from pywidevine.cdm import cdm, deviceconfig
|
||||
|
||||
def do_decrypt(init_data_b64, cert_data_b64, device, licurl, licheader):
    """Run one Widevine challenge/license round-trip.

    Returns (Correct, keys): Correct is True when processing is finished
    (successfully, or abandoned after a failed license request), and keys is
    the list of 'kid:key' strings (empty on failure).

    NOTE(review): the *device* argument is accepted but the CDM is always
    built with deviceconfig.device_android_generic, as in the original —
    confirm whether *device* should be honoured.
    """
    wvdecrypt = WvDecrypt(init_data_b64=init_data_b64, cert_data_b64=cert_data_b64, device=deviceconfig.device_android_generic)
    chal = wvdecrypt.get_challenge()
    headers = {
        'authorization': licheader
    }

    license_base64 = 'Error'
    try:
        license_res = requests.Session().post(url=licurl, data=chal, headers=headers, proxies=proxy_cfg.get_proxy('meta'))
        license_base64 = base64.b64encode(license_res.content)
    except Exception as err:
        # The POST itself failed, so there is no response body to show.
        # (The old handler printed license_res.text — a NameError when the
        # request raised before assignment — and returned a bare 'Error'
        # string where get_keys() unpacks a 2-tuple.)
        print('License request failed: ' + str(err))

    if license_base64 == 'Error':
        # Report "done, no keys" so the retry loop in get_keys() terminates,
        # matching the intent of the original else-branch.
        return True, []

    wvdecrypt.update_license(license_base64)
    # start_process() was previously invoked twice back-to-back; one call
    # performs the decryption and yields the keys.
    Correct, keyswvdecrypt = wvdecrypt.start_process()
    return Correct, keyswvdecrypt
|
||||
|
||||
def get_keys(lic_info, pssh):
    """Fetch the content keys for *pssh*, retrying until do_decrypt succeeds.

    lic_info carries 'license' (Widevine URL) and 'lic_header' (auth header).
    Returns the list of keys produced by the last do_decrypt() call.
    """
    device = deviceconfig.DeviceConfig(deviceconfig.device_android_generic)
    finished = False
    keys = []
    while not finished:
        finished, keys = do_decrypt(init_data_b64=bytes(pssh.encode()),
                                    cert_data_b64=None,
                                    device=device,
                                    licurl=lic_info['license'],
                                    licheader=lic_info['lic_header'])
    return keys
|
||||
|
||||
def DecryptAudio(inputAudio, keys_audio):
    """Decrypt *inputAudio* with the key matching its default_KID, then demux.

    The decrypted temp .mp4 is remuxed ('-c copy') into its native container
    (AAC -> .m4a, E-AC-3 -> .eac3, AC-3 -> .ac3) and intermediates removed.

    Returns True when the file was decrypted or needed no decryption,
    False when no key in *keys_audio* matches (previously fell off the end
    and returned None).
    """
    key_audio_id_original = getKeyId(inputAudio)
    if key_audio_id_original == "nothing":
        # No default_KID found: the file is not encrypted.
        return True

    outputAudioTemp = inputAudio.replace(".mp4", "_dec.mp4")
    for key in keys_audio:
        key_id = key[0:32]
        if key_id != key_audio_id_original:
            continue
        print("\nDecrypting audio...")
        print("Using KEY: " + key)
        wvdecrypt_process = subprocess.Popen([pmnp_cfg.MP4DECRYPT, "--show-progress", "--key", key, inputAudio, outputAudioTemp])
        # communicate() waits for exit; the old extra wait() was redundant.
        wvdecrypt_process.communicate()
        time.sleep(50.0 / 1000.0)
        os.remove(inputAudio)

        print("\nDemuxing audio...")
        mediainfo = MediaInfo.parse(outputAudioTemp)
        audio_info = next(x for x in mediainfo.tracks if x.track_type == "Audio")
        codec_name = audio_info.format
        # Map the codec to a container extension; unknown codecs keep ''.
        ext = {"AAC": ".m4a", "E-AC-3": ".eac3", "AC-3": ".ac3"}.get(codec_name, '')
        outputAudio = outputAudioTemp.replace("_dec.mp4", ext)
        print("{} -> {}".format(outputAudioTemp, outputAudio))
        ff = ffmpy.FFmpeg(executable=pmnp_cfg.FFMPEG, inputs={outputAudioTemp: None}, outputs={outputAudio: '-c copy'}, global_options="-y -hide_banner -loglevel warning")
        ff.run()
        time.sleep(50.0 / 1000.0)
        os.remove(outputAudioTemp)
        print("Done!")
        return True

    # Encrypted file but none of our keys matches its KID.
    return False
|
||||
|
||||
def DecryptVideo(inputVideo, keys_video):
    """Decrypt *inputVideo* in place (via a '_dec' temp file) and remux it.

    Returns True when the file was decrypted or needed no decryption,
    False when no key in *keys_video* matches its default_KID (previously
    fell off the end and returned None). The dead 'inputVideo = inputVideo'
    statement from the original is removed.
    """
    key_video_id_original = getKeyId(inputVideo)
    if key_video_id_original == 'nothing':
        # No default_KID found: the file is not encrypted.
        return True

    outputVideoTemp = inputVideo.replace('.mp4', '_dec.mp4')
    outputVideo = inputVideo  # remux back over the original name
    for key in keys_video:
        if key[0:32] != key_video_id_original:
            continue
        print('\nDecrypting video...')
        print('Using KEY: ' + key)
        wvdecrypt_process = subprocess.Popen([pmnp_cfg.MP4DECRYPT, '--show-progress', '--key', key, inputVideo, outputVideoTemp])
        # communicate() waits for exit; the old extra wait() was redundant.
        wvdecrypt_process.communicate()
        print('\nRemuxing video...')
        ff = ffmpy.FFmpeg(executable=pmnp_cfg.FFMPEG, inputs={outputVideoTemp: None}, outputs={outputVideo: '-c copy'}, global_options='-y -hide_banner -loglevel warning')
        ff.run()
        time.sleep(0.05)
        os.remove(outputVideoTemp)
        print('Done!')
        return True

    # Encrypted file but none of our keys matches its KID.
    return False
|
||||
|
||||
def DemuxAudio(inputAudio):
    """Losslessly repackage a decrypted audio .mp4 into its native container.

    AAC -> .m4a, E-AC-3 -> .eac3, AC-3 -> .ac3. The source .mp4 is removed
    after a successful '-c copy' remux. Does nothing when the file is absent.
    """
    if not os.path.isfile(inputAudio):
        return

    print('\nDemuxing audio...')
    report = mediainfo_(inputAudio)
    # Read codec info from the Audio track (last one wins if several).
    for track in report['media']['track']:
        if track['@type'] != 'Audio':
            continue
        codec_name = track['Format']
        try:
            codec_tag_string = track['Format_Commercial_IfAny']
        except Exception:
            codec_tag_string = ''

    # Map the codec to a container extension; unknown codecs keep ''.
    ext = ''
    if codec_name == 'AAC':
        ext = '.m4a'
    elif codec_name == 'E-AC-3':
        ext = '.eac3'
    elif codec_name == 'AC-3':
        ext = '.ac3'

    outputAudio = inputAudio.replace('.mp4', ext)
    print('{} -> {}'.format(inputAudio, outputAudio))
    remux = ffmpy.FFmpeg(executable=pmnp_cfg.FFMPEG,
                         inputs={inputAudio: None},
                         outputs={outputAudio: '-c copy'},
                         global_options='-y -hide_banner -loglevel warning')
    remux.run()
    time.sleep(0.05)
    os.remove(inputAudio)
    print('Done!')
|
||||
|
||||
get_content_info()
|
|
@ -0,0 +1,789 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Module: Paramount Plus Downloader
|
||||
# Created on: 19-02-2021
|
||||
# Authors: JUNi
|
||||
# Version: 2.0
|
||||
|
||||
import urllib.parse
|
||||
import re, base64, requests, sys, os
|
||||
import subprocess, shutil
|
||||
import xmltodict, isodate
|
||||
import json, ffmpy
|
||||
import http, html, time
|
||||
|
||||
from unidecode import unidecode
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from titlecase import titlecase
|
||||
|
||||
from pywidevine.clients.proxy_config import ProxyConfig
|
||||
from pywidevine.muxer.muxer import Muxer
|
||||
|
||||
from pywidevine.clients.paramountplus.downloader import WvDownloader
|
||||
from pywidevine.clients.paramountplus.config import WvDownloaderConfig
|
||||
|
||||
|
||||
# Base paths for the script.
# NOTE(review): os.path.realpath('paramountplus') resolves relative to the
# current working directory, not the script's location, so dirPath is
# effectively the cwd — confirm this is intended (vs. using __file__).
currentFile = 'paramountplus'
realPath = os.path.realpath(currentFile)
dirPath = os.path.dirname(realPath)
# Desktop-Chrome user agent sent with every request.
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36'
# Shared HTTP session; proxies are attached to it inside main().
SESSION = requests.Session()
|
||||
|
||||
def main(args):
|
||||
global _id

# External tool names; resolved via PATH.
mp4decryptexe = "mp4decrypt"
mp4dumptexe = "mp4dump"
ffmpegpath = "ffmpeg"
mkvmergeexe = "mkvmerge"
MediaInfo_exe = shutil.which("mediainfo") or shutil.which("MediaInfo")

# Proxy map for metadata/license requests: 'none' disables proxies explicitly,
# any other non-empty value is used for both http and https.
proxies = {}
proxy_meta = args.proxy
if proxy_meta == 'none':
    proxies['meta'] = {'http': None, 'https': None}
elif proxy_meta:
    proxies['meta'] = {'http': proxy_meta, 'https': proxy_meta}
SESSION.proxies = proxies.get('meta')
proxy_cfg = ProxyConfig(proxies)

# KEYS/PARAMOUNTPLUS.txt caches every KID:KEY pair we have seen.
# Fix: keys_file is now assigned unconditionally. It was previously only set
# in the else-branch, so the very first run (KEYS dir missing) raised
# NameError inside the fallback `with open(keys_file, ...)` below.
keys_dir = dirPath + '/KEYS'
if not os.path.exists(keys_dir):
    os.makedirs(keys_dir)
keys_file = keys_dir + '/PARAMOUNTPLUS.txt'
try:
    keys_file_pmnp = open(keys_file, 'r')
    keys_file_txt = keys_file_pmnp.readlines()
except Exception:
    # First run: create the cache with its header line, then read it back.
    with open(keys_file, 'a', encoding='utf8') as file:
        file.write('##### One KEY per line. #####\n')
    keys_file_pmnp = open(keys_file, 'r', encoding='utf8')
    keys_file_txt = keys_file_pmnp.readlines()
|
||||
|
||||
def alphanumericSort(l):
    """Natural sort: embedded digit runs compare numerically ('ep2' < 'ep10')."""
    def natural_key(value):
        return [int(piece) if piece.isdigit() else piece
                for piece in re.split('([0-9]+)', value)]

    return sorted(l, key=natural_key)
|
||||
|
||||
def convert_size(size_bytes):
    """Format a bitrate given in bits as whole kbps, e.g. 128000 -> '128kbps'."""
    if size_bytes == 0:
        return '0bps'
    return '%ikbps' % round(size_bytes / 1000, 0)
|
||||
|
||||
def get_size(size):
    """Human-readable byte size: divide by 1024 until <= 1024 ('B'..'TB')."""
    units = {0: '', 1: 'K', 2: 'M', 3: 'G', 4: 'T'}
    exponent = 0
    while size > 1024:
        size /= 1024
        exponent += 1
    return str(round(size, 2)) + units[exponent] + 'B'
|
||||
|
||||
def getKeyId(name):
    """Extract the default_KID from mp4dump output for *name*.

    Returns the hex KID string, or 'nothing' when the file has no
    default_KID (i.e. it is not encrypted).
    """
    dump_proc = subprocess.Popen([mp4dumptexe, name], stdout=(subprocess.PIPE))
    dump_text = str(dump_proc.stdout.read())
    start = find_str(dump_text, 'default_KID')
    # Slice past 'default_KID = [..]' and strip the formatting characters.
    kid = (dump_text[start:start + 63]
           .replace('default_KID = ', '')
           .replace('[', '')
           .replace(']', '')
           .replace(' ', ''))
    if kid in ('', "'"):
        kid = 'nothing'
    return kid
|
||||
|
||||
def find_str(s, char):
    """Return the index of the first occurrence of substring *char* in *s*, or -1.

    Thin wrapper around str.find(). The previous hand-rolled scan was O(n*m)
    and crashed with IndexError on an empty needle (char[0] on ''); str.find
    handles that case (returns 0) and is otherwise identical.
    """
    return s.find(char)
|
||||
|
||||
def mediainfo_(file):
    """Run the MediaInfo CLI on *file* and return its JSON report as a dict."""
    proc = subprocess.Popen([MediaInfo_exe, '--Output=JSON', '-f', file],
                            stdout=(subprocess.PIPE))
    return json.load(proc.stdout)
|
||||
|
||||
def ReplaceDontLikeWord(X):
    """Sanitize a title for use in file names and Title-Case it.

    ':' variants become ' - ', '&' becomes 'and', and characters that are
    illegal or awkward in file names are stripped. Curly apostrophes are
    normalized to straight ones (after plain apostrophes are removed, so
    they survive). The original wrapped this chain in a try/except whose
    handler repeated the exact same chain — any exception was simply raised
    again — so the duplicate branch is removed with no behavior change.
    """
    X = X.replace(" : ", " - ").replace(": ", " - ").replace(":", " - ")
    X = X.replace("&", "and").replace("+", "").replace(";", "").replace("ó", "o")
    X = X.replace("[", "").replace("'", "").replace("]", "").replace("/", "").replace("//", "")
    X = X.replace("’", "'").replace("*", "x").replace("<", "").replace(">", "").replace("|", "")
    X = X.replace("~", "").replace("#", "").replace("%", "").replace("{", "").replace("}", "").replace(",", "")
    X = X.replace("?", "").replace("¿", "")

    return titlecase(X)
|
||||
|
||||
def replace_code_lang(X):
    """Normalize a track language/label to a lowercase language code.

    'es-mx' maps to 'es-la'; audio labels 'dolby digital' and 'dd+' map to
    'en'. Note: the original also replaced 'pt-BR' with 'pt-br', which could
    never match after lower() — lower() already yields 'pt-br' — so that
    dead replacement is dropped with no behavior change.
    """
    X = X.lower()
    return X.replace('es-mx', 'es-la').replace('dolby digital', 'en').replace('dd+', 'en')
|
||||
|
||||
def get_cookies(file_path):
    """Load a Netscape/Mozilla cookies.txt file and return a 'name=value;...' header string.

    Cookie values are HTML-unescaped and URL-decoded. Exits the program with
    instructions when the file is missing or malformed.
    """
    try:
        cj = http.cookiejar.MozillaCookieJar(file_path)
        cj.load()
    except Exception:
        print('\nCookies not found! Please dump the cookies with the Chrome extension https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg and place the generated file in ' + file_path)
        print('\nWarning, do not click on "download all cookies", you have to click on "click here".\n')
        sys.exit(0)

    pairs = []
    for cookie in cj:
        cookie.value = urllib.parse.unquote(html.unescape(cookie.value))
        pairs.append(cookie.name + '=' + cookie.value)
    # Join with ';' instead of appending one per cookie and deleting the
    # trailing separator — the old `del cookies[-1]` raised IndexError when
    # the jar was empty.
    return ';'.join(pairs)
|
||||
|
||||
# Session cookies exported from the browser (Netscape cookies.txt format),
# loaded from <script dir>/cookies/cookies_pmnp.txt.
cookies_file = 'cookies_pmnp.txt'
cookies = get_cookies(dirPath + '/cookies/' + cookies_file)
# Headers sent with every Paramount+ metadata/page request.
pmnp_headers = {
    'Accept':'application/json, text/plain, */*',
    'Access-Control-Allow-Origin':'*',
    'cookie':cookies,
    'User-Agent':USER_AGENT
}
|
||||
|
||||
def mpd_parsing(mpd_url):
    """Download and parse the DASH MPD; return the selectable track lists.

    Returns (base_url, duration_seconds, video_list, audio_list, pssh, xml)
    where video_list is sorted ascending by bandwidth (best rendition last,
    capped by --customquality) and audio_list keeps one best rendition per
    language, optionally filtered by --alang / --only-2ch-audio.
    """
    base_url = mpd_url.split('stream.mpd')[0]
    r = SESSION.get(url=mpd_url)
    r.raise_for_status()
    xml = xmltodict.parse(r.text)
    # Round-trip through json to get plain dicts/lists instead of OrderedDicts.
    mpdf = json.loads(json.dumps(xml))
    length = isodate.parse_duration(mpdf['MPD']['@mediaPresentationDuration']).total_seconds()
    tracks = mpdf['MPD']['Period']['AdaptationSet']

    def get_pssh(track):
        # Widevine PSSH: the ContentProtection entry carrying the Widevine system id.
        pssh = ''
        for t in track["ContentProtection"]:
            if t['@schemeIdUri'].lower() == 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
                pssh = t["cenc:pssh"]
        return pssh

    def force_instance(x):
        # xmltodict yields a dict for a single Representation, a list otherwise.
        if isinstance(x['Representation'], list):
            X = x['Representation']
        else:
            X = [x['Representation']]
        return X

    # Collect every video rendition; '@codecs' may live on the Representation
    # or on the enclosing AdaptationSet.
    video_list = []
    for video_tracks in tracks:
        if video_tracks['@contentType'] == 'video':
            pssh = get_pssh(video_tracks)
            for x in force_instance(video_tracks):
                try:
                    codecs = x['@codecs']
                except (KeyError, TypeError):
                    codecs = video_tracks['@codecs']
                video_dict = {
                    'Height':x['@height'],
                    'Width':x['@width'],
                    'Bandwidth':x['@bandwidth'],
                    'ID':x['@id'],
                    'TID':video_tracks['@id'],
                    'Codec':codecs}
                video_list.append(video_dict)

    video_list = sorted(video_list, key=(lambda k: int(k['Bandwidth'])))

    # Drop renditions taller than the requested --customquality cap.
    while args.customquality != [] and int(video_list[(-1)]['Height']) > int(args.customquality[0]):
        video_list.pop(-1)

    audio_list = []
    for audio_tracks in tracks:
        if audio_tracks['@contentType'] == 'audio':
            for x in force_instance(audio_tracks):
                try:
                    codecs = x['@codecs']
                except (KeyError, TypeError):
                    codecs = audio_tracks['@codecs']
                audio_dict = {
                    'Bandwidth':x['@bandwidth'],
                    'ID':audio_tracks['@id'],
                    'TID':audio_tracks['@id'],
                    'Language':replace_code_lang(audio_tracks['@lang']),
                    'Codec':codecs}
                audio_list.append(audio_dict)

    # Best bandwidth first so the per-language pick below keeps the best track.
    audio_list = sorted(audio_list, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))), reverse=True)
    # --only-2ch-audio: drop (E-)AC-3 tracks (codec id contains '-3').
    if args.only_2ch_audio:
        c = 0
        while c != len(audio_list):
            if '-3' in audio_list[c]['Codec'].split('=')[0]:
                audio_list.remove(audio_list[c])
            else:
                c += 1

    BitrateList = []
    AudioLanguageList = []
    for x in audio_list:
        BitrateList.append(x['Bandwidth'])
        AudioLanguageList.append(x['Language'])

    BitrateList = alphanumericSort(list(set(BitrateList)))
    AudioLanguageList = alphanumericSort(list(set(AudioLanguageList)))
    # Keep only the first (= highest-bandwidth, per the sort above) track of
    # each language.
    audioList_new = []
    audio_Dict_new = {}
    for y in AudioLanguageList:
        counter = 0
        for x in audio_list:
            if x['Language'] == y and counter == 0:
                audio_Dict_new = {
                    'Language':x['Language'],
                    'Bandwidth':x['Bandwidth'],
                    'Codec': x['Codec'],
                    'TID':x['TID'],
                    'ID':x['ID']}
                audioList_new.append(audio_Dict_new)
                counter = counter + 1

    audioList = audioList_new
    audio_list = sorted(audioList, key=(lambda k: (int(k['Bandwidth']), str(k['Language']))))

    # --alang filter: keep only the requested languages.
    audioList_new = []
    if args.audiolang:
        for x in audio_list:
            langAbbrev = x['Language']
            if langAbbrev in list(args.audiolang):
                audioList_new.append(x)
        audio_list = audioList_new

    return base_url, length, video_list, audio_list, pssh, xml
|
||||
|
||||
def get_episodes(ep_str, num_eps):
    """Expand an episode selector like '1,3-5,8-' into a list of episode numbers.

    Comma-separated tokens are either single numbers or 'a-b' ranges
    (inclusive); an open-ended range 'a-' runs up to *num_eps*.
    """
    selected = []
    for token in ep_str.split(','):
        if '-' in token:
            lo, hi = token.split('-')
            selected.extend(range(int(lo), int(hi or num_eps) + 1))
        else:
            selected.append(int(token))
    return selected
|
||||
|
||||
# Content identifier: second-to-last path segment of the given URL
# (e.g. /shows/<slug>/... or /movies/<id>/).
_id = args.url_season.split('/')[-2]
# Direct episode links (/shows/<slug>/video/<content-id>): group 2 is the
# show slug, group 4 the episode's content id (used later for filtering).
if '/video/' in args.url_season:
    content_regex = r'(\/shows\/)([\w-]+)(\/video\/)([\w-]+)'
    url_match = re.search(content_regex, args.url_season)
    _id = url_match[2]
|
||||
|
||||
def get_content_info():
    """Resolve args.url_season into per-title dicts and run start_process on each.

    Shows: enumerate the requested seasons via the XHR episode listing and
    process each episode. Movies: scrape the page's player.metaData JSON.
    """
    if 'shows' in args.url_season:
        # Empty-season probe just to enumerate the available seasons.
        pmnp_season_url = 'https://www.paramountplus.com/shows/{}/xhr/episodes/page/0/size/100/xs/0/season/{}/'.format(_id, '')
        season_req = requests.get(url=pmnp_season_url, headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))

        if not args.season:
            args.season = 'all'

        # Parse --season: 'all', 'a,b,c', 'a-b', or a single number.
        seasons = []
        if args.season:
            if args.season == 'all':
                seasons = 'all'
            elif ',' in args.season:
                seasons = [int(x) for x in args.season.split(',')]
            elif '-' in args.season:
                (start, end) = args.season.split('-')
                seasons = list(range(int(start), int(end) + 1))
            else:
                seasons = [int(args.season)]

        if seasons == 'all':
            seasons_list = [x['season_number'] for x in season_req.json()['result']['data']]
            seasons = sorted(set(seasons_list))

        for season_num in seasons:
            pmnp_season_url = 'https://www.paramountplus.com/shows/{}/xhr/episodes/page/0/size/500/xs/0/season/{}/'.format(_id, season_num)
            season_req = requests.get(url=pmnp_season_url, headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
            if season_req.json()['result']['total'] < 1:
                print('This season doesnt exist!')
                exit()

            for num, ep in enumerate(season_req.json()['result']['data'], start=1):
                episodeNumber = ep['episode_number']
                seasonNumber = ep['season_number']
                seriesTitle = ReplaceDontLikeWord(ep['series_title'])
                episodeTitle = ReplaceDontLikeWord(ep['label'])
                seriesName = f'{seriesTitle} S{seasonNumber:0>2}E{episodeNumber:0>2} - {episodeTitle}'
                folderName = f'{seriesTitle} S{seasonNumber:0>2}'
                raw_url = urllib.parse.urljoin('https://www.paramountplus.com', ep['metaData']['contentUrl'])

                # NOTE(review): the one-element list is rebuilt every
                # iteration, so each episode is filtered and processed
                # individually rather than batched.
                episodes_list_new = []
                episodes_dict = {
                    'id': ep['content_id'],
                    'raw_url': raw_url,
                    'pid':ep['metaData']['pid'],
                    'seriesName':seriesName,
                    'folderName':folderName,
                    'episodeNumber': num,
                    'seasonNumber':seasonNumber,
                    'pmnpType': 'show'}
                episodes_list_new.append(episodes_dict)
                episodes_list = []
                for x in episodes_list_new:
                    episodes_list.append(x)
                #episodes_list = sorted(episodes_list, key=lambda x: x['episodeNumber'])

                # -e / --episodeStart filter (list/range selector syntax).
                if args.episodeStart:
                    eps = get_episodes(args.episodeStart, len(episodes_list))
                    episodes_list = [x for x in episodes_list if x['episodeNumber'] in eps]

                # Direct /video/<id> link: keep only that episode.
                if 'video' in args.url_season:
                    episodes_list = [x for x in episodes_list if x['id'] in url_match.group(4)]

                for content_json in episodes_list:
                    start_process(content_json)

    if 'movies' in args.url_season:
        # Retry the page request until it succeeds.
        while 1:
            resp = requests.get(url=args.url_season + '/', headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
            if resp.ok:
                break

        html_data = resp
        # Flatten the page onto one line so the regexes below can span tags.
        html_data = html_data.text.replace('\r\n', '').replace('\n', '').replace('\r', '').replace('\t', '').replace(' ', '')
        # NOTE(review): the inline '(?i)' flag is not at the start of the
        # pattern — deprecated since Python 3.6 and an error on 3.11+.
        html_data_list = re.split('(</div>)(?i)', html_data)
        # Extract the player.metaData JSON blob from the player <div>.
        json_web = []
        for div in html_data_list:
            if 'player.paramsVO.adCallParams' in div:
                print()
                rg = re.compile('(player.metaData = )(.*)(;player.tms_program_id)')
                m = rg.search(div)
                if m:
                    json_web = m.group(2)
                    json_web = json.loads(json_web)

        content_dict = {}
        episodes_list = []
        # Movie display name: '<Title> (<year>)', year taken from the air date.
        year_regex = r'(\d{4})'
        movieTitle = ReplaceDontLikeWord(json_web['seriesTitle'])
        try:
            r = re.search(year_regex, json_web['airdate'])
        except KeyError:
            r = re.search(year_regex, json_web['airdate_tv'])
        seriesName = f'{movieTitle} ({r.group(0)})'

        content_dict = {
            'id':json_web['contentId'],
            'raw_url': str(args.url_season),
            'pid': json_web['pid'],
            'seriesName':seriesName,
            'folderName':None,
            'episodeNumber':1,
            'seasonNumber':1,
            'pmnpType': 'movie'}
        episodes_list.append(content_dict)

        for content_json in episodes_list:
            start_process(content_json)
|
||||
|
||||
def get_license(id_json):
    """Scrape the content page for the Widevine license URL and auth header.

    Returns (license_url, authorization_header); license_url may be falsy
    when the page indicates rate limiting.
    """
    while 1:
        resp = requests.get(url=id_json['raw_url'], headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
        if resp.ok:
            break
        # Back off briefly instead of hammering the server on failures.
        time.sleep(1)

    # Flatten the page onto one line so the regexes below can span tags.
    html_data = resp.text.replace('\r\n', '').replace('\n', '').replace('\r', '').replace('\t', '').replace(' ', '')
    # Fix: the inline '(?i)' flag must lead the pattern — a mid-pattern
    # global flag is deprecated since Python 3.6 and an error on 3.11+.
    html_data_list = re.split('(?i)(</div>)', html_data)
    # Extract the player.drm JSON blob from the DRM bootstrap <div>.
    json_web = []
    for div in html_data_list:
        if '(!window.CBS.Registry.drmPromise) {' in div:
            rg = re.compile('(player.drm = )(.*)(;}player.enableCP)')
            m = rg.search(div)
            if m:
                json_web = json.loads(m.group(2))

    lic_url = json_web['widevine']['url']
    header_auth = json_web['widevine']['header']['Authorization']
    if not lic_url:
        print('Too many requests...')
    return lic_url, header_auth
|
||||
|
||||
# Resolve the download root directory (Windows-style backslash paths).
global folderdownloader
if args.output:
    if not os.path.exists(args.output):
        os.makedirs(args.output)
    os.chdir(args.output)
    if ":" in str(args.output):
        # Contains a drive letter -> treat as an absolute path.
        folderdownloader = str(args.output).replace('/','\\').replace('.\\','\\')
    else:
        # Relative path -> anchor it to the script directory.
        # NOTE(review): dirPath + output is concatenated without a path
        # separator; a bare folder name like 'out' would produce
        # '<dirPath>out' — confirm callers always pass './out' style paths.
        folderdownloader = dirPath + str(args.output).replace('/','\\').replace('.\\','\\')
else:
    # No --output: download next to the script.
    folderdownloader = dirPath.replace('/','\\').replace('.\\','\\')
|
||||
|
||||
def get_manifest(id_json):
    """Resolve the SMIL manifest for this PID and bundle it with license info.

    Returns a dict with 'mpd_url', 'license' (Widevine URL) and 'lic_header'
    (Authorization header value).
    """
    api_manifest = 'https://link.theplatform.com/s/dJ5BDC/{}?format=SMIL&manifest=m3u&Tracking=true&mbr=true'.format(id_json['pid'])
    response = requests.get(url=api_manifest, headers=pmnp_headers, proxies=proxy_cfg.get_proxy('meta'))
    # Round-trip through json to turn xmltodict's OrderedDicts into plain dicts.
    smil = json.loads(json.dumps(xmltodict.parse(response.text)))
    videoSrc = []
    try:
        # Several renditions: 'switch' is a list; the last entry's src wins.
        for entry in smil['smil']['body']['seq']['switch']:
            videoSrc = entry['video']['@src']
    except Exception:
        # Single rendition: 'switch' is a dict, so the loop above raises.
        videoSrc = smil['smil']['body']['seq']['switch']['video']['@src']
    lic_url, header_auth = get_license(id_json)
    return {'mpd_url': videoSrc, 'license': lic_url, 'lic_header': header_auth}
|
||||
|
||||
def start_process(content_info):
    """Download, decrypt and mux the title described by *content_info*.

    *content_info* must carry at least 'pid', 'seriesName', 'pmnpType' and
    'folderName'.  Drives the whole pipeline: manifest + license lookup, key
    extraction, video/audio download, decryption and final MKV muxing.

    NOTE(review): reconstructed from an indentation-stripped diff; the
    nesting below follows the visible statement order — confirm against a
    pristine copy of the script.
    """
    drm_info = get_manifest(content_info)
    base_url, length, video_list, audio_list, pssh, xml = mpd_parsing(drm_info['mpd_url'])

    # Highest-quality variant is the last entry of the parsed MPD list;
    # convert it once instead of rebuilding the dict for every field.
    best_video = dict(video_list[-1])
    video_bandwidth = best_video['Bandwidth']
    video_height = str(best_video['Height'])
    video_width = str(best_video['Width'])
    video_codec = str(best_video['Codec'])
    video_format_id = str(best_video['ID'])
    video_track_id = str(best_video['TID'])

    if not args.onlykeys:
        if not args.novideo:
            print('\nVIDEO - Bitrate: ' + convert_size(int(video_bandwidth)) + ' - Profile: ' + video_codec.split('=')[0] + ' - Size: ' + get_size(length * float(video_bandwidth) * 0.125) + ' - Dimensions: ' + video_width + 'x' + video_height)
            print()
        if not args.noaudio:
            if audio_list != []:
                for x in audio_list:
                    audio_bandwidth = x['Bandwidth']
                    audio_representation_id = str(x['Codec'])
                    audio_lang = x['Language']
                    print('AUDIO - Bitrate: ' + convert_size(int(audio_bandwidth)) + ' - Profile: ' + audio_representation_id.split('=')[0] + ' - Size: ' + get_size(length * float(audio_bandwidth) * 0.125) + ' - Language: ' + audio_lang)
                print()

    print('Name: ' + content_info['seriesName'])

    # Both branches used the same name/height; only the output path differs.
    CurrentName = content_info['seriesName']
    CurrentHeigh = str(video_height)
    if content_info['pmnpType'] == 'show':
        VideoOutputName = folderdownloader + '\\' + str(content_info['folderName']) + '\\' + str(CurrentName) + ' [' + str(CurrentHeigh) + 'p].mkv'
    else:
        # NOTE(review): kept verbatim — the name ends up "<Name>\ [<h>p].mkv",
        # which looks like an upstream bug but is the original behavior.
        VideoOutputName = folderdownloader + '\\' + str(CurrentName) + '\\' + ' [' + str(CurrentHeigh) + 'p].mkv'

    if args.onlykeys:
        keys_all = get_keys(drm_info, pssh)
        # One open() for the whole dump instead of one per key.
        with open(keys_file, 'a', encoding='utf8') as file:
            file.write(CurrentName + '\n')
            print('\n' + CurrentName)
            for key in keys_all:
                file.write(key + '\n')
                print(key)
    else:
        if not args.novideo or (not args.noaudio):
            print("\nGetting KEYS...")
        try:
            keys_all = get_keys(drm_info, pssh)
        except KeyError:
            # License server refused; fall back to keys saved in the txt file.
            print('License request failed, using keys from txt')
            keys_all = keys_file_txt
        else:
            with open(keys_file, "a", encoding="utf8") as file:
                file.write(CurrentName + "\n")
                for key in keys_all:
                    file.write(key + "\n")
            print("Done!")

        if not os.path.isfile(VideoOutputName):
            # --- download -------------------------------------------------
            if not args.novideo:
                inputVideo = CurrentName + ' [' + str(CurrentHeigh) + 'p].mp4'
                if os.path.isfile(inputVideo):
                    print('\n' + inputVideo + '\nFile has already been successfully downloaded previously.\n')
                else:
                    wvdl_cfg = WvDownloaderConfig(xml, base_url, inputVideo, video_track_id, video_format_id, 'video')
                    WvDownloader(wvdl_cfg).run()

            if not args.noaudio:
                for x in audio_list:
                    langAbbrev = x['Language']
                    inputAudio = CurrentName + ' ' + '(' + langAbbrev + ').mp4'
                    inputAudio_demuxed = CurrentName + ' ' + '(' + langAbbrev + ')' + '.m4a'
                    if os.path.isfile(inputAudio) or os.path.isfile(inputAudio_demuxed):
                        print('\n' + inputAudio + '\nFile has already been successfully downloaded previously.\n')
                    else:
                        wvdl_cfg = WvDownloaderConfig(xml, base_url, inputAudio, x['TID'], x['ID'], 'audio')
                        WvDownloader(wvdl_cfg).run()

            # --- decrypt --------------------------------------------------
            CorrectDecryptVideo = False
            if not args.novideo:
                inputVideo = CurrentName + ' [' + str(CurrentHeigh) + 'p].mp4'
                if os.path.isfile(inputVideo):
                    CorrectDecryptVideo = DecryptVideo(inputVideo=inputVideo, keys_video=keys_all)
                else:
                    CorrectDecryptVideo = True

            CorrectDecryptAudio = False
            if not args.noaudio:
                for x in audio_list:
                    langAbbrev = x['Language']
                    inputAudio = CurrentName + ' ' + '(' + langAbbrev + ')' + '.mp4'
                    if os.path.isfile(inputAudio):
                        CorrectDecryptAudio = DecryptAudio(inputAudio=inputAudio, keys_audio=keys_all)
                    else:
                        CorrectDecryptAudio = True

            # --- mux ------------------------------------------------------
            # Flattened from five nested ifs; `== True` comparisons dropped
            # (DecryptVideo/DecryptAudio return True or a falsy value).
            if (not args.nomux and not args.novideo and not args.noaudio
                    and CorrectDecryptVideo and CorrectDecryptAudio):
                print('\nMuxing...')
                pmnpType = content_info['pmnpType']
                folderName = content_info['folderName']
                MKV_Muxer = Muxer(CurrentName=CurrentName,
                                  SeasonFolder=folderName if pmnpType == "show" else None,
                                  CurrentHeigh=CurrentHeigh,
                                  Type=pmnpType,
                                  mkvmergeexe=mkvmergeexe)
                MKV_Muxer.mkvmerge_muxer(lang="English")

                if args.tag:
                    inputName = CurrentName + ' [' + CurrentHeigh + 'p].mkv'
                    release_group(base_filename=inputName,
                                  default_filename=CurrentName,
                                  folder_name=folderName,
                                  type=pmnpType,
                                  video_height=CurrentHeigh)

                if not args.keep:
                    # Remove every intermediate artifact belonging to this title.
                    for f in os.listdir():
                        if re.fullmatch(re.escape(CurrentName) + r'.*\.(mp4|m4a|h264|h265|eac3|srt|txt|avs|lwi|mpd)', f):
                            os.remove(f)
                print("Done!")
        else:
            print("\nFile '" + str(VideoOutputName) + "' already exists.")
|
||||
|
||||
def release_group(base_filename, default_filename, folder_name, type, video_height):
    """Rename the muxed MKV to a scene-style release name and print the mapping.

    base_filename:    current MKV file name, e.g. "Name [720p].mkv".
    default_filename: display name used as the base of the new name.
    folder_name:      season folder (used only when *type* == 'show').
    type:             'show' for series, anything else is treated as a movie.
    video_height:     vertical resolution string, e.g. '720'.
    """
    if type == 'show':
        video_mkv = os.path.join(folder_name, base_filename)
    else:
        video_mkv = base_filename

    mediainfo = mediainfo_(video_mkv)

    # Scan the muxed file's tracks once.  BUGFIX: the originals were left
    # unbound (NameError below) when the file lacked a video or audio track;
    # empty-string defaults keep the naming logic safe instead.
    video_format = ''
    codec_name = ''
    channels_number = ''
    for track in mediainfo['media']['track']:
        if track['@type'] == 'Video':
            video_format = track['Format']
        elif track['@type'] == 'Audio':
            # Last audio track wins, matching the original loop.
            codec_name = track['Format']
            channels_number = track['Channels']

    video_codec = ''
    if video_format == "AVC":
        video_codec = 'H.264'
    elif video_format == "HEVC":
        video_codec = 'H.265'

    audio_codec = ''
    audio_channels = ''
    if codec_name == "AAC":
        audio_codec = 'AAC'
    elif codec_name == "AC-3":
        audio_codec = "DD"
    elif codec_name == "E-AC-3":
        audio_codec = "DDP"
    elif codec_name == "E-AC-3 JOC":
        audio_codec = "ATMOS"

    if channels_number == "2":
        audio_channels = "2.0"
    elif channels_number == "6":
        audio_channels = "5.1"

    audio_ = audio_codec + audio_channels

    # Sanitize the base name: '&' spelled out, punctuation stripped, spaces
    # to dots, repeated dots collapsed, accents transliterated to ASCII.
    default_filename = default_filename.replace('&', '.and.')
    default_filename = re.sub(r'[]!"#$%\'()*+,:;<=>?@\\^_`{|}~[-]', '', default_filename)
    default_filename = default_filename.replace(' ', '.')
    default_filename = re.sub(r'\.{2,}', '.', default_filename)
    default_filename = unidecode(default_filename)

    output_name = '{}.{}p.PMNP.WEB-DL.{}.{}-{}'.format(default_filename, video_height, audio_, video_codec, args.tag)
    if type == 'show':
        outputName = os.path.join(folder_name, output_name + '.mkv')
    else:
        outputName = output_name + '.mkv'

    os.rename(video_mkv, outputName)
    print("{} -> {}".format(base_filename, output_name))
|
||||
|
||||
from pywidevine.decrypt.wvdecryptcustom import WvDecrypt
|
||||
from pywidevine.cdm import cdm, deviceconfig
|
||||
|
||||
def do_decrypt(init_data_b64, cert_data_b64, device, licurl, licheader):
    """Run one Widevine challenge/license round-trip.

    Returns a (Correct, keys) pair: *Correct* is the CDM's completion flag
    (True when there is nothing more to do), *keys* is the list of decrypted
    keys (empty when the license request failed).

    NOTE(review): the *device* parameter is ignored — the CDM is always built
    with deviceconfig.device_android_generic, matching the original behavior.
    """
    wvdecrypt = WvDecrypt(init_data_b64=init_data_b64, cert_data_b64=cert_data_b64, device=deviceconfig.device_android_generic)
    chal = wvdecrypt.get_challenge()
    headers = {
        'authorization': licheader
    }

    license_res = None
    try:
        license_res = requests.Session().post(url=licurl, data=chal, headers=headers, proxies=proxy_cfg.get_proxy('meta'))
        license_base64 = base64.b64encode(license_res.content)
    except Exception:
        # BUGFIX: the original printed license_res.text here, which raised
        # NameError when the POST itself failed before assignment.
        if license_res is not None:
            print(license_res.text)
        license_base64 = "Error"
        # BUGFIX: the original did `return license_base64` here, handing a bare
        # string to callers that unpack a 2-tuple (get_keys) — ValueError.
        # Falling through lets the failure branch below return (True, []).

    if license_base64 != 'Error':
        wvdecrypt.update_license(license_base64)
        # NOTE(review): start_process() was invoked twice in the original;
        # kept as-is in case the first call primes the CDM state.
        wvdecrypt.start_process()
        Correct, keyswvdecrypt = wvdecrypt.start_process()
        return Correct, keyswvdecrypt
    else:
        # Report success-with-no-keys so the retry loop in get_keys() stops.
        return True, []
|
||||
|
||||
def get_keys(lic_info, pssh):
    """Request the license until the CDM reports completion; return the keys.

    *lic_info* must provide 'license' (URL) and 'lic_header' (Authorization
    value); *pssh* is the base64 init-data string.
    """
    done = False
    collected = []
    profile = deviceconfig.device_android_generic
    # `is False` kept deliberately: any non-False completion flag ends the loop.
    while done is False:
        done, collected = do_decrypt(init_data_b64=bytes(pssh.encode()),
                                     cert_data_b64=None,
                                     device=profile,
                                     licurl=lic_info['license'],
                                     licheader=lic_info['lic_header'])
    return collected
|
||||
|
||||
def DecryptAudio(inputAudio, keys_audio):
    """Decrypt *inputAudio* with its matching CENC key, then demux it.

    The decrypted track is remuxed into a codec-native container
    (.m4a/.eac3/.ac3).  Returns True on success and when the file carries no
    encryption info; returns None when no key in *keys_audio* matches.
    """
    track_kid = getKeyId(inputAudio)
    decrypted_tmp = inputAudio.replace('.mp4', '_dec.mp4')

    if track_kid == 'nothing':
        # No encryption info in the file — nothing to decrypt.
        return True

    for key in keys_audio:
        if key[0:32] != track_kid:
            continue
        print('\nDecrypting audio...')
        print('Using KEY: ' + key)
        proc = subprocess.Popen([mp4decryptexe, '--show-progress', '--key', key, inputAudio, decrypted_tmp])
        proc.communicate()
        proc.wait()
        time.sleep(0.05)
        os.remove(inputAudio)

        print('\nDemuxing audio...')
        info = mediainfo_(decrypted_tmp)
        for track in info['media']['track']:
            if track['@type'] != 'Audio':
                continue
            fmt = track['Format']
            try:
                _commercial = track['Format_Commercial_IfAny']  # unused; kept from original
            except Exception:
                _commercial = ''
            extension = ''
            if fmt == "AAC":
                extension = '.m4a'
            elif fmt == "E-AC-3":
                extension = ".eac3"
            elif fmt == "AC-3":
                extension = ".ac3"
            demuxed = decrypted_tmp.replace("_dec.mp4", extension)
            print("{} -> {}".format(decrypted_tmp, demuxed))
            ff = ffmpy.FFmpeg(executable=ffmpegpath, inputs={decrypted_tmp: None}, outputs={demuxed: '-c copy'}, global_options="-y -hide_banner -loglevel warning")
            ff.run()
            time.sleep(50.0 / 1000.0)
            os.remove(decrypted_tmp)
            print("Done!")
            return True
|
||||
|
||||
def DecryptVideo(inputVideo, keys_video):
    """Decrypt *inputVideo* in place (via a temp file) using its matching key.

    Returns True on success and when the file carries no encryption info;
    returns None when no key in *keys_video* matches.
    """
    track_kid = getKeyId(inputVideo)
    decrypted_tmp = inputVideo.replace('.mp4', '_dec.mp4')
    final_output = inputVideo  # remuxed result overwrites the encrypted input

    if track_kid == 'nothing':
        # No encryption info in the file — nothing to decrypt.
        return True

    for key in keys_video:
        if key[0:32] != track_kid:
            continue
        print('\nDecrypting video...')
        print('Using KEY: ' + key)
        proc = subprocess.Popen([mp4decryptexe, '--show-progress', '--key', key, inputVideo, decrypted_tmp])
        proc.communicate()
        proc.wait()
        print('\nRemuxing video...')
        ff = ffmpy.FFmpeg(executable=ffmpegpath, inputs={decrypted_tmp: None}, outputs={final_output: '-c copy'}, global_options='-y -hide_banner -loglevel warning')
        ff.run()
        time.sleep(0.05)
        os.remove(decrypted_tmp)
        print('Done!')
        return True
|
||||
|
||||
def DemuxAudio(inputAudio):
    """Remux a decrypted .mp4 audio track into its codec-native container.

    Picks the extension from the track's codec (.m4a/.eac3/.ac3), copies the
    stream with ffmpeg and removes the source .mp4.  No-op when *inputAudio*
    does not exist.
    """
    if not os.path.isfile(inputAudio):
        return
    print('\nDemuxing audio...')
    info = mediainfo_(inputAudio)
    for track in info['media']['track']:
        if track['@type'] != 'Audio':
            continue
        fmt = track['Format']
        try:
            _commercial = track['Format_Commercial_IfAny']  # unused; kept from original
        except Exception:
            _commercial = ''
        extension = ''
        if fmt == 'AAC':
            extension = '.m4a'
        elif fmt == 'E-AC-3':
            extension = '.eac3'
        elif fmt == 'AC-3':
            extension = '.ac3'
        outputAudio = inputAudio.replace('.mp4', extension)
        print('{} -> {}'.format(inputAudio, outputAudio))
        ff = ffmpy.FFmpeg(executable=ffmpegpath,
                          inputs={inputAudio: None},
                          outputs={outputAudio: '-c copy'},
                          global_options='-y -hide_banner -loglevel warning')
        ff.run()
        time.sleep(0.05)
        os.remove(inputAudio)
        print('Done!')
|
||||
|
||||
# Script entry point: start the content lookup / download flow.
get_content_info()
|
|
@ -0,0 +1,364 @@
|
|||
import base64
|
||||
|
||||
import os
|
||||
import time
|
||||
import binascii
|
||||
|
||||
from google.protobuf.message import DecodeError
|
||||
from google.protobuf import text_format
|
||||
|
||||
from pywidevine.cdm.formats import wv_proto2_pb2 as wv_proto2
|
||||
from pywidevine.cdm.session import Session
|
||||
from pywidevine.cdm.key import Key
|
||||
from Cryptodome.Random import get_random_bytes
|
||||
from Cryptodome.Random import random
|
||||
from Cryptodome.Cipher import PKCS1_OAEP, AES
|
||||
from Cryptodome.Hash import CMAC, SHA256, HMAC, SHA1
|
||||
from Cryptodome.PublicKey import RSA
|
||||
from Cryptodome.Signature import pss
|
||||
from Cryptodome.Util import Padding
|
||||
import logging
|
||||
|
||||
class Cdm:
    """Software Widevine CDM.

    Manages license "sessions": parses PSSH init data, builds signed license
    challenges, decrypts returned licenses and exposes the resulting content
    keys.  Most methods return 1 on failure (legacy error convention).
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        # session_id (bytes) -> Session object
        self.sessions = {}

    def open_session(self, init_data_b64, device, raw_init_data=None, offline=False):
        """Open a new CDM session for *init_data_b64* on *device*.

        Returns the new session id (bytes), or 1 on failure.
        """
        self.logger.debug("open_session(init_data_b64={}, device={}".format(init_data_b64, device))
        self.logger.info("opening new cdm session")
        if device.session_id_type == 'android':
            # format: 16 random hexdigits, 2 digit counter, 14 0s
            rand_ascii = ''.join(random.choice('ABCDEF0123456789') for _ in range(16))
            counter = '01'  # this resets regularly so its fine to use 01
            rest = '00000000000000'
            session_id = (rand_ascii + counter + rest).encode('ascii')
        elif device.session_id_type == 'chrome':
            session_id = get_random_bytes(16)
        else:
            # other session id formats NYI
            self.logger.error("device type is unusable")
            return 1

        if raw_init_data and isinstance(raw_init_data, (bytes, bytearray)):
            # used for NF key exchange, where they don't provide a valid PSSH
            init_data = raw_init_data
            self.raw_pssh = True
        else:
            init_data = self._parse_init_data(init_data_b64)
            self.raw_pssh = False

        if not init_data:
            self.logger.error("unable to parse init data")
            return 1

        self.sessions[session_id] = Session(session_id, init_data, device, offline)
        self.logger.info("session opened and init data parsed successfully")
        return session_id

    def _parse_init_data(self, init_data_b64):
        """Parse base64 init data into a WidevineCencHeader; None on failure."""
        parsed_init_data = wv_proto2.WidevineCencHeader()
        try:
            self.logger.debug("trying to parse init_data directly")
            # Common case: a full PSSH box — skip the 32-byte box header.
            parsed_init_data.ParseFromString(base64.b64decode(init_data_b64)[32:])
        except DecodeError:
            self.logger.debug("unable to parse as-is, trying without pssh box header removal")
            try:
                # BUGFIX: the fallback parsed the identical [32:] slice as the
                # first attempt, so it could never succeed where that failed.
                # A raw CENC header has no box header — parse it whole.
                parsed_init_data.ParseFromString(base64.b64decode(init_data_b64))
            except DecodeError:
                self.logger.error("unable to parse, unsupported init data format")
                return None
        self.logger.debug("init_data:")
        for line in text_format.MessageToString(parsed_init_data).splitlines():
            self.logger.debug(line)
        return parsed_init_data

    def close_session(self, session_id):
        """Drop the session; 0 on success, 1 when unknown."""
        self.logger.debug("close_session(session_id={})".format(session_id))
        self.logger.info("closing cdm session")
        if session_id in self.sessions:
            self.sessions.pop(session_id)
            self.logger.info("cdm session closed")
            return 0
        else:
            self.logger.info("session {} not found".format(session_id))
            return 1

    def set_service_certificate(self, session_id, cert_b64):
        """Install a service certificate on the session and enable privacy mode.

        Accepts the cert either wrapped in a SignedMessage or as a bare
        SignedDeviceCertificate.  Returns 0 on success, 1 on failure.
        """
        self.logger.debug("set_service_certificate(session_id={}, cert={})".format(session_id, cert_b64))
        self.logger.info("setting service certificate")

        if session_id not in self.sessions:
            self.logger.error("session id doesn't exist")
            return 1

        session = self.sessions[session_id]

        message = wv_proto2.SignedMessage()
        try:
            message.ParseFromString(base64.b64decode(cert_b64))
        except DecodeError:
            # Not fatal: a bare SignedDeviceCertificate is handled below.
            self.logger.error("failed to parse cert as SignedMessage")

        service_certificate = wv_proto2.SignedDeviceCertificate()

        if message.Type:
            self.logger.debug("service cert provided as signedmessage")
            try:
                service_certificate.ParseFromString(message.Msg)
            except DecodeError:
                self.logger.error("failed to parse service certificate")
                return 1
        else:
            self.logger.debug("service cert provided as signeddevicecertificate")
            try:
                service_certificate.ParseFromString(base64.b64decode(cert_b64))
            except DecodeError:
                self.logger.error("failed to parse service certificate")
                return 1

        self.logger.debug("service certificate:")
        for line in text_format.MessageToString(service_certificate).splitlines():
            self.logger.debug(line)

        session.service_certificate = service_certificate
        session.privacy_mode = True
        return 0

    def get_license_request(self, session_id):
        """Build and sign the license challenge for the session.

        Returns the serialized SignedLicenseRequest bytes, or 1 on failure.
        """
        self.logger.debug("get_license_request(session_id={})".format(session_id))
        self.logger.info("getting license request")

        if session_id not in self.sessions:
            self.logger.error("session ID does not exist")
            return 1

        session = self.sessions[session_id]

        # raw pssh will be treated as bytes and not parsed
        if self.raw_pssh:
            license_request = wv_proto2.SignedLicenseRequestRaw()
        else:
            license_request = wv_proto2.SignedLicenseRequest()
        client_id = wv_proto2.ClientIdentification()

        if not os.path.exists(session.device_config.device_client_id_blob_filename):
            self.logger.error("no client ID blob available for this device")
            return 1

        with open(session.device_config.device_client_id_blob_filename, "rb") as f:
            try:
                client_id.ParseFromString(f.read())
            except DecodeError:
                self.logger.error("client id failed to parse as protobuf")
                return 1

        self.logger.debug("building license request")
        if not self.raw_pssh:
            license_request.Type = wv_proto2.SignedLicenseRequest.MessageType.Value('LICENSE_REQUEST')
            license_request.Msg.ContentId.CencId.Pssh.CopyFrom(session.init_data)
        else:
            license_request.Type = wv_proto2.SignedLicenseRequestRaw.MessageType.Value('LICENSE_REQUEST')
            license_request.Msg.ContentId.CencId.Pssh = session.init_data  # bytes

        if session.offline:
            license_type = wv_proto2.LicenseType.Value('OFFLINE')
        else:
            license_type = wv_proto2.LicenseType.Value('DEFAULT')
        license_request.Msg.ContentId.CencId.LicenseType = license_type
        license_request.Msg.ContentId.CencId.RequestId = session_id
        license_request.Msg.Type = wv_proto2.LicenseRequest.RequestType.Value('NEW')
        license_request.Msg.RequestTime = int(time.time())
        license_request.Msg.ProtocolVersion = wv_proto2.ProtocolVersion.Value('CURRENT')
        if session.device_config.send_key_control_nonce:
            license_request.Msg.KeyControlNonce = random.randrange(1, 2**31)

        if session.privacy_mode:
            if session.device_config.vmp:
                self.logger.debug("vmp required, adding to client_id")
                self.logger.debug("reading vmp hashes")
                vmp_hashes = wv_proto2.FileHashes()
                with open(session.device_config.device_vmp_blob_filename, "rb") as f:
                    try:
                        vmp_hashes.ParseFromString(f.read())
                    except DecodeError:
                        self.logger.error("vmp hashes failed to parse as protobuf")
                        return 1
                client_id._FileHashes.CopyFrom(vmp_hashes)
            self.logger.debug("privacy mode & service certificate loaded, encrypting client id")
            self.logger.debug("unencrypted client id:")
            for line in text_format.MessageToString(client_id).splitlines():
                self.logger.debug(line)
            # Encrypt the client id with a fresh AES key, then wrap that key
            # for the service certificate's RSA public key (privacy mode).
            cid_aes_key = get_random_bytes(16)
            cid_iv = get_random_bytes(16)

            cid_cipher = AES.new(cid_aes_key, AES.MODE_CBC, cid_iv)
            encrypted_client_id = cid_cipher.encrypt(Padding.pad(client_id.SerializeToString(), 16))

            service_public_key = RSA.importKey(session.service_certificate._DeviceCertificate.PublicKey)
            service_cipher = PKCS1_OAEP.new(service_public_key)
            encrypted_cid_key = service_cipher.encrypt(cid_aes_key)

            encrypted_client_id_proto = wv_proto2.EncryptedClientIdentification()
            encrypted_client_id_proto.ServiceId = session.service_certificate._DeviceCertificate.ServiceId
            encrypted_client_id_proto.ServiceCertificateSerialNumber = session.service_certificate._DeviceCertificate.SerialNumber
            encrypted_client_id_proto.EncryptedClientId = encrypted_client_id
            encrypted_client_id_proto.EncryptedClientIdIv = cid_iv
            encrypted_client_id_proto.EncryptedPrivacyKey = encrypted_cid_key

            license_request.Msg.EncryptedClientId.CopyFrom(encrypted_client_id_proto)
        else:
            license_request.Msg.ClientId.CopyFrom(client_id)

        if session.device_config.private_key_available:
            # BUGFIX: the key file was opened without being closed.
            with open(session.device_config.device_private_key_filename) as key_file:
                key = RSA.importKey(key_file.read())
            session.device_key = key
        else:
            self.logger.error("need device private key, other methods unimplemented")
            return 1

        self.logger.debug("signing license request")
        digest = SHA1.new(license_request.Msg.SerializeToString())
        license_request.Signature = pss.new(key).sign(digest)

        session.license_request = license_request

        self.logger.debug("license request:")
        for line in text_format.MessageToString(session.license_request).splitlines():
            self.logger.debug(line)
        self.logger.info("license request created")
        self.logger.debug("license request b64: {}".format(base64.b64encode(license_request.SerializeToString())))
        return license_request.SerializeToString()

    def provide_license(self, session_id, license_b64):
        """Decrypt a returned license and store its content keys on the session.

        Returns 0 on success, 1 on failure.
        """
        self.logger.debug("provide_license(session_id={}, license_b64={})".format(session_id, license_b64))
        self.logger.info("decrypting provided license")

        if session_id not in self.sessions:
            self.logger.error("session does not exist")
            return 1

        session = self.sessions[session_id]

        if not session.license_request:
            self.logger.error("generate a license request first!")
            return 1

        signed_license = wv_proto2.SignedLicense()
        try:
            signed_license.ParseFromString(base64.b64decode(license_b64))
        except DecodeError:
            self.logger.error("unable to parse license - check protobufs")
            return 1

        session.license = signed_license

        self.logger.debug("license:")
        for line in text_format.MessageToString(signed_license).splitlines():
            self.logger.debug(line)

        self.logger.debug("deriving keys from session key")

        oaep_cipher = PKCS1_OAEP.new(session.device_key)
        session.session_key = oaep_cipher.decrypt(signed_license.SessionKey)

        # Widevine KDF: AES-CMAC over counter || label || context (request msg).
        lic_req_msg = session.license_request.Msg.SerializeToString()
        enc_key_base = b"ENCRYPTION\000" + lic_req_msg + b"\0\0\0\x80"
        auth_key_base = b"AUTHENTICATION\0" + lic_req_msg + b"\0\0\2\0"

        def _cmac(data):
            # One CMAC block of the KDF (consolidates 4 copies of this code).
            mac = CMAC.new(session.session_key, ciphermod=AES)
            mac.update(data)
            return mac.digest()

        enc_cmac_key = _cmac(b"\x01" + enc_key_base)
        auth_cmac_combined_1 = _cmac(b"\x01" + auth_key_base) + _cmac(b"\x02" + auth_key_base)
        auth_cmac_combined_2 = _cmac(b"\x03" + auth_key_base) + _cmac(b"\x04" + auth_key_base)

        session.derived_keys['enc'] = enc_cmac_key
        session.derived_keys['auth_1'] = auth_cmac_combined_1
        session.derived_keys['auth_2'] = auth_cmac_combined_2

        self.logger.debug('verifying license signature')

        lic_hmac = HMAC.new(session.derived_keys['auth_1'], digestmod=SHA256)
        lic_hmac.update(signed_license.Msg.SerializeToString())

        self.logger.debug("calculated sig: {} actual sig: {}".format(lic_hmac.hexdigest(), binascii.hexlify(signed_license.Signature)))

        if lic_hmac.digest() != signed_license.Signature:
            self.logger.info("license signature doesn't match - writing bin so they can be debugged")
            with open("original_lic.bin", "wb") as f:
                f.write(base64.b64decode(license_b64))
            with open("parsed_lic.bin", "wb") as f:
                f.write(signed_license.SerializeToString())
            self.logger.info("continuing anyway")

        self.logger.debug("key count: {}".format(len(signed_license.Msg.Key)))
        for key in signed_license.Msg.Key:
            if key.Id:
                key_id = key.Id
            else:
                # Keys without an explicit id are named after their type.
                key_id = wv_proto2.License.KeyContainer.KeyType.Name(key.Type).encode('utf-8')
            encrypted_key = key.Key
            iv = key.Iv
            key_type = wv_proto2.License.KeyContainer.KeyType.Name(key.Type)

            cipher = AES.new(session.derived_keys['enc'], AES.MODE_CBC, iv=iv)
            decrypted_key = cipher.decrypt(encrypted_key)

            permissions = []
            if key_type == "OPERATOR_SESSION":
                perms = key._OperatorSessionKeyPermissions
                for (descriptor, value) in perms.ListFields():
                    if value == 1:
                        permissions.append(descriptor.name)
                print(permissions)
            session.keys.append(Key(key_id, key_type, Padding.unpad(decrypted_key, 16), permissions))

        self.logger.info("decrypted all keys")
        return 0

    def get_keys(self, session_id):
        """Return the session's decrypted Key list, or 1 when unknown."""
        if session_id in self.sessions:
            return self.sessions[session_id].keys
        else:
            self.logger.error("session not found")
            return 1
|
|
@ -0,0 +1,53 @@
|
|||
import os
|
||||
|
||||
# Capability profile for a generic Android L3 device (software CDM).
device_android_generic = {
    'name': 'android_generic',
    'description': 'android generic l3',
    'security_level': 3,
    'session_id_type': 'android',
    'private_key_available': True,
    'vmp': False,
    'send_key_control_nonce': True,
}

# Every device profile this build knows about.
devices_available = [device_android_generic]

# Sub-directory (relative to this module) that holds per-device key material.
FILES_FOLDER = 'devices'
|
||||
|
||||
class DeviceConfig:
    """Resolved configuration for one CDM device profile.

    Wraps a raw device dict (see devices_available) and resolves the on-disk
    locations of its key material under <module dir>/<FILES_FOLDER>/<name>/.
    """

    def __init__(self, device):
        self.device_name = device['name']
        self.description = device['description']
        self.security_level = device['security_level']
        self.session_id_type = device['session_id_type']
        self.private_key_available = device['private_key_available']
        self.vmp = device['vmp']
        self.send_key_control_nonce = device['send_key_control_nonce']

        # Each file name may be overridden per-device; otherwise a
        # conventional default inside the device folder is used.
        # (Replaces five copy-pasted if/else blocks.)
        self.keybox_filename = self._material_path(device, 'keybox_filename', 'keybox')
        self.device_cert_filename = self._material_path(device, 'device_cert_filename', 'device_cert')
        self.device_private_key_filename = self._material_path(device, 'device_private_key_filename', 'device_private_key')
        self.device_client_id_blob_filename = self._material_path(device, 'device_client_id_blob_filename', 'device_client_id_blob')
        self.device_vmp_blob_filename = self._material_path(device, 'device_vmp_blob_filename', 'device_vmp_blob')

    @staticmethod
    def _material_path(device, key, default):
        """Absolute path of one key-material file for *device*."""
        return os.path.join(os.path.dirname(__file__), FILES_FOLDER, device['name'], device.get(key, default))

    def __repr__(self):
        return "DeviceConfig(name={}, description={}, security_level={}, session_id_type={}, private_key_available={}, vmp={})".format(self.device_name, self.description, self.security_level, self.session_id_type, self.private_key_available, self.vmp)
|
Binary file not shown.
|
@ -0,0 +1,27 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEpQIBAAKCAQEA4sUKDpvMG/idF8oCH5AVSwFd5Mk+rEwOBsLZMYdliXWe1hn9
|
||||
mdE6u9pjsr+bLrZjlKxMFqPPxbIUcC1Ii7BFSje2Fd8kxnaIprQWxDPgK+NSSx7v
|
||||
Un452TyB1L9lx39ZBt0PlRfwjkCodX+I9y+oBga73NRh7hPbtLzXe/r/ubFBaEu+
|
||||
aRkDZBwYPqHgH1RoFLuyFNMjfqGcPosGxceDtvPysmBxB93Hk2evml5fjdYGg6tx
|
||||
z510g+XFPDFv7GSy1KuWqit83MqzPls9qAQMkwUc05ggjDhGCKW4/p97fn23WDFE
|
||||
3TzSSsQvyJLKA3s9oJbtJCD/gOHYqDvnWn8zPwIDAQABAoIBAQDCWe1Mp+o+7sx0
|
||||
XwWC15HoPruiIXg9YtGCqexLrqcvMEd5Z70Z32BfL8TSpbTyTA78lM6BeNPRs9Yg
|
||||
bi8GyYQZH7ZG+IAkN+LWPPJmJa+y7ZjSGSkzoksiC+GZ3I/2cwZyA3Qfa+0XfgLi
|
||||
8PMKJyXyREMt+DgWO57JQC/OakhRdCR19mM6NKd+ynd/IEz/NIbjMLDVKwW8HEPx
|
||||
N3r5CU9O96nr62DI68KVj3jwUR3cDi/5xfhosYhCQjHJuobNbeFR18dY2nQNLWYd
|
||||
S0wtskla1fl9eYHwYAzwru4wHT4WJC7+V4pscfCI0YZB6PslxDKrv73l5H1tz4cf
|
||||
Vy58NRSBAoGBAPSmjoVtQzTvQ6PZIs81SF1ulJI9kUpyFaBoSSgt+2ZkeNtF6Hih
|
||||
Zm7OVJ9wg9sfjpB3SFBUjuhXz/ts/t6dkA2PgCbrvhBMRKSGbfyhhtM2gRf002I4
|
||||
bJ7Y0C/ont4WzC/XbXEkAmh+fG2/JRvbdVQaIdyS6MmVHtCtRsHEQZS5AoGBAO1K
|
||||
IXOKAFA+320+Hkbqskfevmxrv+JHIdetliaREZwQH+VYUUM8u5/Kt3oyMat+mH90
|
||||
rZOKQK2zM8cz4tKclTUT54nrtICxeo6UHVc56FqXZ6sVvVgm8Cnvt1md4XwG4FwQ
|
||||
r/OlaM6Hr5HRf8dkzuzqm4ZQYRHGzZ6AMphj8Xu3AoGAdmo7p5dIJVH98kuCDrsi
|
||||
iJ6iaNpF/buUfiyb5EfFXD0bRj7jE6hDdTSHPxjtqVzv2zrxFHipJwqBz5dlEYlA
|
||||
FWA0ziHiv+66dsveZp4kLQ0/lMHaorre0E/vDJFSe/qa4DksbsvYIo2+WjxfkMk7
|
||||
U/bGFwZAiHmWDbkg+16rw3kCgYEAyyodWf9eJVavlakJ404vNrnP8KSQtfyRTUii
|
||||
toKewTBNHuBvM1JckoPOdCFlxZ+ukfIka56DojU8r+IM4qaOWdOg+sWE1mses9S9
|
||||
CmHaPzZC3IjQhRlRp5ZHNcOnu7lnf2wKOmH1Sl+CQydMcDwvr0lvv6AyfDXq9zps
|
||||
F2365CECgYEAmYgs/qwnh9m0aGDw/ZGrASoE0TxlpizPvsVDGx9t9UGC2Z+5QvAE
|
||||
ZcQeKoLCbktr0BnRLI+W1g+KpXQGcnSF9VX/qwUlf72XA6C6kobQvW+Yd/H/IN5d
|
||||
jPqoL/m41rRzm+J+9/Tfc8Aiy1kkllUYnVJdC5QLAIswuhI8lkaFTN4=
|
||||
-----END RSA PRIVATE KEY-----
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,466 @@
|
|||
syntax = "proto2";
|
||||
|
||||
// from x86 (partial), most of it from the ARM version:
|
||||
message ClientIdentification {
|
||||
enum TokenType {
|
||||
KEYBOX = 0;
|
||||
DEVICE_CERTIFICATE = 1;
|
||||
REMOTE_ATTESTATION_CERTIFICATE = 2;
|
||||
}
|
||||
message NameValue {
|
||||
required string Name = 1;
|
||||
required string Value = 2;
|
||||
}
|
||||
message ClientCapabilities {
|
||||
enum HdcpVersion {
|
||||
HDCP_NONE = 0;
|
||||
HDCP_V1 = 1;
|
||||
HDCP_V2 = 2;
|
||||
HDCP_V2_1 = 3;
|
||||
HDCP_V2_2 = 4;
|
||||
}
|
||||
optional uint32 ClientToken = 1;
|
||||
optional uint32 SessionToken = 2;
|
||||
optional uint32 VideoResolutionConstraints = 3;
|
||||
optional HdcpVersion MaxHdcpVersion = 4;
|
||||
optional uint32 OemCryptoApiVersion = 5;
|
||||
}
|
||||
required TokenType Type = 1;
|
||||
//optional bytes Token = 2; // by default the client treats this as blob, but it's usually a DeviceCertificate, so for usefulness sake, I'm replacing it with this one:
|
||||
optional SignedDeviceCertificate Token = 2; // use this when parsing, "bytes" when building a client id blob
|
||||
repeated NameValue ClientInfo = 3;
|
||||
optional bytes ProviderClientToken = 4;
|
||||
optional uint32 LicenseCounter = 5;
|
||||
optional ClientCapabilities _ClientCapabilities = 6; // how should we deal with duped names? will have to look at proto docs later
|
||||
optional FileHashes _FileHashes = 7; // vmp blob goes here
|
||||
}
|
||||
|
||||
message DeviceCertificate {
|
||||
enum CertificateType {
|
||||
ROOT = 0;
|
||||
INTERMEDIATE = 1;
|
||||
USER_DEVICE = 2;
|
||||
SERVICE = 3;
|
||||
}
|
||||
required CertificateType Type = 1; // the compiled code reused this as ProvisionedDeviceInfo.WvSecurityLevel, however that is incorrect (compiler aliased it as they're both identical as a structure)
|
||||
optional bytes SerialNumber = 2;
|
||||
optional uint32 CreationTimeSeconds = 3;
|
||||
optional bytes PublicKey = 4;
|
||||
optional uint32 SystemId = 5;
|
||||
optional uint32 TestDeviceDeprecated = 6; // is it bool or int?
|
||||
optional bytes ServiceId = 7; // service URL for service certificates
|
||||
}
|
||||
|
||||
// missing some references,
|
||||
message DeviceCertificateStatus {
|
||||
enum CertificateStatus {
|
||||
VALID = 0;
|
||||
REVOKED = 1;
|
||||
}
|
||||
optional bytes SerialNumber = 1;
|
||||
optional CertificateStatus Status = 2;
|
||||
optional ProvisionedDeviceInfo DeviceInfo = 4; // where is 3? is it deprecated?
|
||||
}
|
||||
|
||||
message DeviceCertificateStatusList {
|
||||
optional uint32 CreationTimeSeconds = 1;
|
||||
repeated DeviceCertificateStatus CertificateStatus = 2;
|
||||
}
|
||||
|
||||
message EncryptedClientIdentification {
|
||||
required string ServiceId = 1;
|
||||
optional bytes ServiceCertificateSerialNumber = 2;
|
||||
required bytes EncryptedClientId = 3;
|
||||
required bytes EncryptedClientIdIv = 4;
|
||||
required bytes EncryptedPrivacyKey = 5;
|
||||
}
|
||||
|
||||
// todo: fill (for this top-level type, it might be impossible/difficult)
|
||||
enum LicenseType {
|
||||
ZERO = 0;
|
||||
DEFAULT = 1; // 1 is STREAMING/temporary license; on recent versions may go up to 3 (latest x86); it might be persist/don't persist type, unconfirmed
|
||||
OFFLINE = 2;
|
||||
}
|
||||
|
||||
// todo: fill (for this top-level type, it might be impossible/difficult)
|
||||
// this is just a guess because these globals got lost, but really, do we need more?
|
||||
enum ProtocolVersion {
|
||||
CURRENT = 21; // don't have symbols for this
|
||||
}
|
||||
|
||||
|
||||
message LicenseIdentification {
|
||||
optional bytes RequestId = 1;
|
||||
optional bytes SessionId = 2;
|
||||
optional bytes PurchaseId = 3;
|
||||
optional LicenseType Type = 4;
|
||||
optional uint32 Version = 5;
|
||||
optional bytes ProviderSessionToken = 6;
|
||||
}
|
||||
|
||||
|
||||
message License {
|
||||
message Policy {
|
||||
optional bool CanPlay = 1; // changed from uint32 to bool
|
||||
optional bool CanPersist = 2;
|
||||
optional bool CanRenew = 3;
|
||||
optional uint32 RentalDurationSeconds = 4;
|
||||
optional uint32 PlaybackDurationSeconds = 5;
|
||||
optional uint32 LicenseDurationSeconds = 6;
|
||||
optional uint32 RenewalRecoveryDurationSeconds = 7;
|
||||
optional string RenewalServerUrl = 8;
|
||||
optional uint32 RenewalDelaySeconds = 9;
|
||||
optional uint32 RenewalRetryIntervalSeconds = 10;
|
||||
optional bool RenewWithUsage = 11; // was uint32
|
||||
}
|
||||
message KeyContainer {
|
||||
enum KeyType {
|
||||
SIGNING = 1;
|
||||
CONTENT = 2;
|
||||
KEY_CONTROL = 3;
|
||||
OPERATOR_SESSION = 4;
|
||||
}
|
||||
enum SecurityLevel {
|
||||
SW_SECURE_CRYPTO = 1;
|
||||
SW_SECURE_DECODE = 2;
|
||||
HW_SECURE_CRYPTO = 3;
|
||||
HW_SECURE_DECODE = 4;
|
||||
HW_SECURE_ALL = 5;
|
||||
}
|
||||
message OutputProtection {
|
||||
enum CGMS {
|
||||
COPY_FREE = 0;
|
||||
COPY_ONCE = 2;
|
||||
COPY_NEVER = 3;
|
||||
CGMS_NONE = 0x2A; // PC default!
|
||||
}
|
||||
optional ClientIdentification.ClientCapabilities.HdcpVersion Hdcp = 1; // it's most likely a copy of Hdcp version available here, but compiler optimized it away
|
||||
optional CGMS CgmsFlags = 2;
|
||||
}
|
||||
message KeyControl {
|
||||
required bytes KeyControlBlock = 1; // what is this?
|
||||
required bytes Iv = 2;
|
||||
}
|
||||
message OperatorSessionKeyPermissions {
|
||||
optional uint32 AllowEncrypt = 1;
|
||||
optional uint32 AllowDecrypt = 2;
|
||||
optional uint32 AllowSign = 3;
|
||||
optional uint32 AllowSignatureVerify = 4;
|
||||
}
|
||||
message VideoResolutionConstraint {
|
||||
optional uint32 MinResolutionPixels = 1;
|
||||
optional uint32 MaxResolutionPixels = 2;
|
||||
optional OutputProtection RequiredProtection = 3;
|
||||
}
|
||||
optional bytes Id = 1;
|
||||
optional bytes Iv = 2;
|
||||
optional bytes Key = 3;
|
||||
optional KeyType Type = 4;
|
||||
optional SecurityLevel Level = 5;
|
||||
optional OutputProtection RequiredProtection = 6;
|
||||
optional OutputProtection RequestedProtection = 7;
|
||||
optional KeyControl _KeyControl = 8; // duped names, etc
|
||||
optional OperatorSessionKeyPermissions _OperatorSessionKeyPermissions = 9; // duped names, etc
|
||||
repeated VideoResolutionConstraint VideoResolutionConstraints = 10;
|
||||
}
|
||||
optional LicenseIdentification Id = 1;
|
||||
optional Policy _Policy = 2; // duped names, etc
|
||||
repeated KeyContainer Key = 3;
|
||||
optional uint32 LicenseStartTime = 4;
|
||||
optional uint32 RemoteAttestationVerified = 5; // bool?
|
||||
optional bytes ProviderClientToken = 6;
|
||||
// there might be more, check with newer versions (I see field 7-8 in a lic)
|
||||
// this appeared in latest x86:
|
||||
optional uint32 ProtectionScheme = 7; // type unconfirmed fully, but it's likely as WidevineCencHeader describesit (fourcc)
|
||||
}
|
||||
|
||||
message LicenseError {
|
||||
enum Error {
|
||||
INVALID_DEVICE_CERTIFICATE = 1;
|
||||
REVOKED_DEVICE_CERTIFICATE = 2;
|
||||
SERVICE_UNAVAILABLE = 3;
|
||||
}
|
||||
//LicenseRequest.RequestType ErrorCode; // clang mismatch
|
||||
optional Error ErrorCode = 1;
|
||||
}
|
||||
|
||||
message LicenseRequest {
|
||||
message ContentIdentification {
|
||||
message CENC {
|
||||
//optional bytes Pssh = 1; // the client's definition is opaque, it doesn't care about the contents, but the PSSH has a clear definition that is understood and requested by the server, thus I'll replace it with:
|
||||
optional WidevineCencHeader Pssh = 1;
|
||||
optional LicenseType LicenseType = 2; // unfortunately the LicenseType symbols are not present, acceptable value seems to only be 1 (is this persist/don't persist? look into it!)
|
||||
optional bytes RequestId = 3;
|
||||
}
|
||||
message WebM {
|
||||
optional bytes Header = 1; // identical to CENC, aside from PSSH and the parent field number used
|
||||
optional LicenseType LicenseType = 2;
|
||||
optional bytes RequestId = 3;
|
||||
}
|
||||
message ExistingLicense {
|
||||
optional LicenseIdentification LicenseId = 1;
|
||||
optional uint32 SecondsSinceStarted = 2;
|
||||
optional uint32 SecondsSinceLastPlayed = 3;
|
||||
optional bytes SessionUsageTableEntry = 4; // interesting! try to figure out the connection between the usage table blob and KCB!
|
||||
}
|
||||
optional CENC CencId = 1;
|
||||
optional WebM WebmId = 2;
|
||||
optional ExistingLicense License = 3;
|
||||
}
|
||||
enum RequestType {
|
||||
NEW = 1;
|
||||
RENEWAL = 2;
|
||||
RELEASE = 3;
|
||||
}
|
||||
optional ClientIdentification ClientId = 1;
|
||||
optional ContentIdentification ContentId = 2;
|
||||
optional RequestType Type = 3;
|
||||
optional uint32 RequestTime = 4;
|
||||
optional bytes KeyControlNonceDeprecated = 5;
|
||||
optional ProtocolVersion ProtocolVersion = 6; // lacking symbols for this
|
||||
optional uint32 KeyControlNonce = 7;
|
||||
optional EncryptedClientIdentification EncryptedClientId = 8;
|
||||
}
|
||||
|
||||
// raw pssh hack
|
||||
message LicenseRequestRaw {
|
||||
message ContentIdentification {
|
||||
message CENC {
|
||||
optional bytes Pssh = 1; // the client's definition is opaque, it doesn't care about the contents, but the PSSH has a clear definition that is understood and requested by the server, thus I'll replace it with:
|
||||
//optional WidevineCencHeader Pssh = 1;
|
||||
optional LicenseType LicenseType = 2; // unfortunately the LicenseType symbols are not present, acceptable value seems to only be 1 (is this persist/don't persist? look into it!)
|
||||
optional bytes RequestId = 3;
|
||||
}
|
||||
message WebM {
|
||||
optional bytes Header = 1; // identical to CENC, aside from PSSH and the parent field number used
|
||||
optional LicenseType LicenseType = 2;
|
||||
optional bytes RequestId = 3;
|
||||
}
|
||||
message ExistingLicense {
|
||||
optional LicenseIdentification LicenseId = 1;
|
||||
optional uint32 SecondsSinceStarted = 2;
|
||||
optional uint32 SecondsSinceLastPlayed = 3;
|
||||
optional bytes SessionUsageTableEntry = 4; // interesting! try to figure out the connection between the usage table blob and KCB!
|
||||
}
|
||||
optional CENC CencId = 1;
|
||||
optional WebM WebmId = 2;
|
||||
optional ExistingLicense License = 3;
|
||||
}
|
||||
enum RequestType {
|
||||
NEW = 1;
|
||||
RENEWAL = 2;
|
||||
RELEASE = 3;
|
||||
}
|
||||
optional ClientIdentification ClientId = 1;
|
||||
optional ContentIdentification ContentId = 2;
|
||||
optional RequestType Type = 3;
|
||||
optional uint32 RequestTime = 4;
|
||||
optional bytes KeyControlNonceDeprecated = 5;
|
||||
optional ProtocolVersion ProtocolVersion = 6; // lacking symbols for this
|
||||
optional uint32 KeyControlNonce = 7;
|
||||
optional EncryptedClientIdentification EncryptedClientId = 8;
|
||||
}
|
||||
|
||||
|
||||
message ProvisionedDeviceInfo {
|
||||
enum WvSecurityLevel {
|
||||
LEVEL_UNSPECIFIED = 0;
|
||||
LEVEL_1 = 1;
|
||||
LEVEL_2 = 2;
|
||||
LEVEL_3 = 3;
|
||||
}
|
||||
optional uint32 SystemId = 1;
|
||||
optional string Soc = 2;
|
||||
optional string Manufacturer = 3;
|
||||
optional string Model = 4;
|
||||
optional string DeviceType = 5;
|
||||
optional uint32 ModelYear = 6;
|
||||
optional WvSecurityLevel SecurityLevel = 7;
|
||||
optional uint32 TestDevice = 8; // bool?
|
||||
}
|
||||
|
||||
|
||||
// todo: fill
|
||||
message ProvisioningOptions {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message ProvisioningRequest {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message ProvisioningResponse {
|
||||
}
|
||||
|
||||
message RemoteAttestation {
|
||||
optional EncryptedClientIdentification Certificate = 1;
|
||||
optional string Salt = 2;
|
||||
optional string Signature = 3;
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message SessionInit {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message SessionState {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message SignedCertificateStatusList {
|
||||
}
|
||||
|
||||
message SignedDeviceCertificate {
|
||||
|
||||
//optional bytes DeviceCertificate = 1; // again, they use a buffer where it's supposed to be a message, so we'll replace it with what it really is:
|
||||
optional DeviceCertificate _DeviceCertificate = 1; // how should we deal with duped names? will have to look at proto docs later
|
||||
optional bytes Signature = 2;
|
||||
optional SignedDeviceCertificate Signer = 3;
|
||||
}
|
||||
|
||||
|
||||
// todo: fill
|
||||
message SignedProvisioningMessage {
|
||||
}
|
||||
|
||||
// the root of all messages, from either server or client
|
||||
message SignedMessage {
|
||||
enum MessageType {
|
||||
LICENSE_REQUEST = 1;
|
||||
LICENSE = 2;
|
||||
ERROR_RESPONSE = 3;
|
||||
SERVICE_CERTIFICATE_REQUEST = 4;
|
||||
SERVICE_CERTIFICATE = 5;
|
||||
}
|
||||
optional MessageType Type = 1; // has in incorrect overlap with License_KeyContainer_SecurityLevel
|
||||
optional bytes Msg = 2; // this has to be casted dynamically, to LicenseRequest, License or LicenseError (? unconfirmed), for Request, no other fields but Type need to be present
|
||||
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
|
||||
optional bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
|
||||
optional bytes SessionKey = 4; // often RSA wrapped for licenses
|
||||
optional RemoteAttestation RemoteAttestation = 5;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// This message is copied from google's docs, not reversed:
|
||||
message WidevineCencHeader {
|
||||
enum Algorithm {
|
||||
UNENCRYPTED = 0;
|
||||
AESCTR = 1;
|
||||
};
|
||||
optional Algorithm algorithm = 1;
|
||||
repeated bytes key_id = 2;
|
||||
|
||||
// Content provider name.
|
||||
optional string provider = 3;
|
||||
|
||||
// A content identifier, specified by content provider.
|
||||
optional bytes content_id = 4;
|
||||
|
||||
// Track type. Acceptable values are SD, HD and AUDIO. Used to
|
||||
// differentiate content keys used by an asset.
|
||||
optional string track_type_deprecated = 5;
|
||||
|
||||
// The name of a registered policy to be used for this asset.
|
||||
optional string policy = 6;
|
||||
|
||||
// Crypto period index, for media using key rotation.
|
||||
optional uint32 crypto_period_index = 7;
|
||||
|
||||
// Optional protected context for group content. The grouped_license is a
|
||||
// serialized SignedMessage.
|
||||
optional bytes grouped_license = 8;
|
||||
|
||||
// Protection scheme identifying the encryption algorithm.
|
||||
// Represented as one of the following 4CC values:
|
||||
// 'cenc' (AESCTR), 'cbc1' (AESCBC),
|
||||
// 'cens' (AESCTR subsample), 'cbcs' (AESCBC subsample).
|
||||
optional uint32 protection_scheme = 9;
|
||||
|
||||
// Optional. For media using key rotation, this represents the duration
|
||||
// of each crypto period in seconds.
|
||||
optional uint32 crypto_period_seconds = 10;
|
||||
}
|
||||
|
||||
|
||||
// remove these when using it outside of protoc:
|
||||
|
||||
// from here on, it's just for testing, these messages don't exist in the binaries, I'm adding them to avoid detecting type programmatically
|
||||
message SignedLicenseRequest {
|
||||
enum MessageType {
|
||||
LICENSE_REQUEST = 1;
|
||||
LICENSE = 2;
|
||||
ERROR_RESPONSE = 3;
|
||||
SERVICE_CERTIFICATE_REQUEST = 4;
|
||||
SERVICE_CERTIFICATE = 5;
|
||||
}
|
||||
optional MessageType Type = 1; // has in incorrect overlap with License_KeyContainer_SecurityLevel
|
||||
optional LicenseRequest Msg = 2; // this has to be casted dynamically, to LicenseRequest, License or LicenseError (? unconfirmed), for Request, no other fields but Type need to be present
|
||||
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
|
||||
optional bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
|
||||
optional bytes SessionKey = 4; // often RSA wrapped for licenses
|
||||
optional RemoteAttestation RemoteAttestation = 5;
|
||||
}
|
||||
|
||||
// hack
|
||||
message SignedLicenseRequestRaw {
|
||||
enum MessageType {
|
||||
LICENSE_REQUEST = 1;
|
||||
LICENSE = 2;
|
||||
ERROR_RESPONSE = 3;
|
||||
SERVICE_CERTIFICATE_REQUEST = 4;
|
||||
SERVICE_CERTIFICATE = 5;
|
||||
}
|
||||
optional MessageType Type = 1; // has in incorrect overlap with License_KeyContainer_SecurityLevel
|
||||
optional LicenseRequestRaw Msg = 2; // this has to be casted dynamically, to LicenseRequest, License or LicenseError (? unconfirmed), for Request, no other fields but Type need to be present
|
||||
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
|
||||
optional bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
|
||||
optional bytes SessionKey = 4; // often RSA wrapped for licenses
|
||||
optional RemoteAttestation RemoteAttestation = 5;
|
||||
}
|
||||
|
||||
|
||||
message SignedLicense {
|
||||
enum MessageType {
|
||||
LICENSE_REQUEST = 1;
|
||||
LICENSE = 2;
|
||||
ERROR_RESPONSE = 3;
|
||||
SERVICE_CERTIFICATE_REQUEST = 4;
|
||||
SERVICE_CERTIFICATE = 5;
|
||||
}
|
||||
optional MessageType Type = 1; // has in incorrect overlap with License_KeyContainer_SecurityLevel
|
||||
optional License Msg = 2; // this has to be casted dynamically, to LicenseRequest, License or LicenseError (? unconfirmed), for Request, no other fields but Type need to be present
|
||||
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
|
||||
optional bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
|
||||
optional bytes SessionKey = 4; // often RSA wrapped for licenses
|
||||
optional RemoteAttestation RemoteAttestation = 5;
|
||||
}
|
||||
|
||||
message SignedServiceCertificate {
|
||||
enum MessageType {
|
||||
LICENSE_REQUEST = 1;
|
||||
LICENSE = 2;
|
||||
ERROR_RESPONSE = 3;
|
||||
SERVICE_CERTIFICATE_REQUEST = 4;
|
||||
SERVICE_CERTIFICATE = 5;
|
||||
}
|
||||
optional MessageType Type = 1; // has in incorrect overlap with License_KeyContainer_SecurityLevel
|
||||
optional SignedDeviceCertificate Msg = 2; // this has to be casted dynamically, to LicenseRequest, License or LicenseError (? unconfirmed), for Request, no other fields but Type need to be present
|
||||
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
|
||||
optional bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
|
||||
optional bytes SessionKey = 4; // often RSA wrapped for licenses
|
||||
optional RemoteAttestation RemoteAttestation = 5;
|
||||
}
|
||||
|
||||
//vmp support
|
||||
message FileHashes {
|
||||
message Signature {
|
||||
optional string filename = 1;
|
||||
optional bool test_signing = 2; //0 - release, 1 - testing
|
||||
optional bytes SHA512Hash = 3;
|
||||
optional bool main_exe = 4; //0 for dlls, 1 for exe, this is field 3 in file
|
||||
optional bytes signature = 5;
|
||||
}
|
||||
optional bytes signer = 1;
|
||||
repeated Signature signatures = 2;
|
||||
}
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,389 @@
|
|||
// beware proto3 won't show missing fields it seems, need to change to "proto2" and add "optional" before every field, and remove all the dummy enum members I added:
|
||||
syntax = "proto3";
|
||||
|
||||
// from x86 (partial), most of it from the ARM version:
|
||||
message ClientIdentification {
|
||||
enum TokenType {
|
||||
KEYBOX = 0;
|
||||
DEVICE_CERTIFICATE = 1;
|
||||
REMOTE_ATTESTATION_CERTIFICATE = 2;
|
||||
}
|
||||
message NameValue {
|
||||
string Name = 1;
|
||||
string Value = 2;
|
||||
}
|
||||
message ClientCapabilities {
|
||||
enum HdcpVersion {
|
||||
HDCP_NONE = 0;
|
||||
HDCP_V1 = 1;
|
||||
HDCP_V2 = 2;
|
||||
HDCP_V2_1 = 3;
|
||||
HDCP_V2_2 = 4;
|
||||
}
|
||||
uint32 ClientToken = 1;
|
||||
uint32 SessionToken = 2;
|
||||
uint32 VideoResolutionConstraints = 3;
|
||||
HdcpVersion MaxHdcpVersion = 4;
|
||||
uint32 OemCryptoApiVersion = 5;
|
||||
}
|
||||
TokenType Type = 1;
|
||||
//bytes Token = 2; // by default the client treats this as blob, but it's usually a DeviceCertificate, so for usefulness sake, I'm replacing it with this one:
|
||||
SignedDeviceCertificate Token = 2;
|
||||
repeated NameValue ClientInfo = 3;
|
||||
bytes ProviderClientToken = 4;
|
||||
uint32 LicenseCounter = 5;
|
||||
ClientCapabilities _ClientCapabilities = 6; // how should we deal with duped names? will have to look at proto docs later
|
||||
}
|
||||
|
||||
message DeviceCertificate {
|
||||
enum CertificateType {
|
||||
ROOT = 0;
|
||||
INTERMEDIATE = 1;
|
||||
USER_DEVICE = 2;
|
||||
SERVICE = 3;
|
||||
}
|
||||
//ProvisionedDeviceInfo.WvSecurityLevel Type = 1; // is this how one is supposed to call it? (it's an enum) there might be a bug here, with CertificateType getting confused with WvSecurityLevel, for now renaming it (verify against other binaries)
|
||||
CertificateType Type = 1;
|
||||
bytes SerialNumber = 2;
|
||||
uint32 CreationTimeSeconds = 3;
|
||||
bytes PublicKey = 4;
|
||||
uint32 SystemId = 5;
|
||||
uint32 TestDeviceDeprecated = 6; // is it bool or int?
|
||||
bytes ServiceId = 7; // service URL for service certificates
|
||||
}
|
||||
|
||||
// missing some references,
|
||||
message DeviceCertificateStatus {
|
||||
enum CertificateStatus {
|
||||
VALID = 0;
|
||||
REVOKED = 1;
|
||||
}
|
||||
bytes SerialNumber = 1;
|
||||
CertificateStatus Status = 2;
|
||||
ProvisionedDeviceInfo DeviceInfo = 4; // where is 3? is it deprecated?
|
||||
}
|
||||
|
||||
message DeviceCertificateStatusList {
|
||||
uint32 CreationTimeSeconds = 1;
|
||||
repeated DeviceCertificateStatus CertificateStatus = 2;
|
||||
}
|
||||
|
||||
message EncryptedClientIdentification {
|
||||
string ServiceId = 1;
|
||||
bytes ServiceCertificateSerialNumber = 2;
|
||||
bytes EncryptedClientId = 3;
|
||||
bytes EncryptedClientIdIv = 4;
|
||||
bytes EncryptedPrivacyKey = 5;
|
||||
}
|
||||
|
||||
// todo: fill (for this top-level type, it might be impossible/difficult)
|
||||
enum LicenseType {
|
||||
ZERO = 0;
|
||||
DEFAULT = 1; // do not know what this is either, but should be 1; on recent versions may go up to 3 (latest x86)
|
||||
}
|
||||
|
||||
// todo: fill (for this top-level type, it might be impossible/difficult)
|
||||
// this is just a guess because these globals got lost, but really, do we need more?
|
||||
enum ProtocolVersion {
|
||||
DUMMY = 0;
|
||||
CURRENT = 21; // don't have symbols for this
|
||||
}
|
||||
|
||||
|
||||
message LicenseIdentification {
|
||||
bytes RequestId = 1;
|
||||
bytes SessionId = 2;
|
||||
bytes PurchaseId = 3;
|
||||
LicenseType Type = 4;
|
||||
uint32 Version = 5;
|
||||
bytes ProviderSessionToken = 6;
|
||||
}
|
||||
|
||||
|
||||
message License {
|
||||
message Policy {
|
||||
uint32 CanPlay = 1;
|
||||
uint32 CanPersist = 2;
|
||||
uint32 CanRenew = 3;
|
||||
uint32 RentalDurationSeconds = 4;
|
||||
uint32 PlaybackDurationSeconds = 5;
|
||||
uint32 LicenseDurationSeconds = 6;
|
||||
uint32 RenewalRecoveryDurationSeconds = 7;
|
||||
string RenewalServerUrl = 8;
|
||||
uint32 RenewalDelaySeconds = 9;
|
||||
uint32 RenewalRetryIntervalSeconds = 10;
|
||||
uint32 RenewWithUsage = 11;
|
||||
uint32 UnknownPolicy12 = 12;
|
||||
}
|
||||
message KeyContainer {
|
||||
enum KeyType {
|
||||
_NOKEYTYPE = 0; // dummy, added to satisfy proto3, not present in original
|
||||
SIGNING = 1;
|
||||
CONTENT = 2;
|
||||
KEY_CONTROL = 3;
|
||||
OPERATOR_SESSION = 4;
|
||||
}
|
||||
enum SecurityLevel {
|
||||
_NOSECLEVEL = 0; // dummy, added to satisfy proto3, not present in original
|
||||
SW_SECURE_CRYPTO = 1;
|
||||
SW_SECURE_DECODE = 2;
|
||||
HW_SECURE_CRYPTO = 3;
|
||||
HW_SECURE_DECODE = 4;
|
||||
HW_SECURE_ALL = 5;
|
||||
}
|
||||
message OutputProtection {
|
||||
enum CGMS {
|
||||
COPY_FREE = 0;
|
||||
COPY_ONCE = 2;
|
||||
COPY_NEVER = 3;
|
||||
CGMS_NONE = 0x2A; // PC default!
|
||||
}
|
||||
ClientIdentification.ClientCapabilities.HdcpVersion Hdcp = 1; // it's most likely a copy of Hdcp version available here, but compiler optimized it away
|
||||
CGMS CgmsFlags = 2;
|
||||
}
|
||||
message KeyControl {
|
||||
bytes KeyControlBlock = 1; // what is this?
|
||||
bytes Iv = 2;
|
||||
}
|
||||
message OperatorSessionKeyPermissions {
|
||||
uint32 AllowEncrypt = 1;
|
||||
uint32 AllowDecrypt = 2;
|
||||
uint32 AllowSign = 3;
|
||||
uint32 AllowSignatureVerify = 4;
|
||||
}
|
||||
message VideoResolutionConstraint {
|
||||
uint32 MinResolutionPixels = 1;
|
||||
uint32 MaxResolutionPixels = 2;
|
||||
OutputProtection RequiredProtection = 3;
|
||||
}
|
||||
bytes Id = 1;
|
||||
bytes Iv = 2;
|
||||
bytes Key = 3;
|
||||
KeyType Type = 4;
|
||||
SecurityLevel Level = 5;
|
||||
OutputProtection RequiredProtection = 6;
|
||||
OutputProtection RequestedProtection = 7;
|
||||
KeyControl _KeyControl = 8; // duped names, etc
|
||||
OperatorSessionKeyPermissions _OperatorSessionKeyPermissions = 9; // duped names, etc
|
||||
repeated VideoResolutionConstraint VideoResolutionConstraints = 10;
|
||||
}
|
||||
LicenseIdentification Id = 1;
|
||||
Policy _Policy = 2; // duped names, etc
|
||||
repeated KeyContainer Key = 3;
|
||||
uint32 LicenseStartTime = 4;
|
||||
uint32 RemoteAttestationVerified = 5; // bool?
|
||||
bytes ProviderClientToken = 6;
|
||||
// there might be more, check with newer versions (I see field 7-8 in a lic)
|
||||
// this appeared in latest x86:
|
||||
uint32 ProtectionScheme = 7; // type unconfirmed fully, but it's likely as WidevineCencHeader describesit (fourcc)
|
||||
bytes UnknownHdcpDataField = 8;
|
||||
}
|
||||
|
||||
message LicenseError {
|
||||
enum Error {
|
||||
DUMMY_NO_ERROR = 0; // dummy, added to satisfy proto3
|
||||
INVALID_DEVICE_CERTIFICATE = 1;
|
||||
REVOKED_DEVICE_CERTIFICATE = 2;
|
||||
SERVICE_UNAVAILABLE = 3;
|
||||
}
|
||||
//LicenseRequest.RequestType ErrorCode; // clang mismatch
|
||||
Error ErrorCode = 1;
|
||||
}
|
||||
|
||||
message LicenseRequest {
|
||||
message ContentIdentification {
|
||||
message CENC {
|
||||
// bytes Pssh = 1; // the client's definition is opaque, it doesn't care about the contents, but the PSSH has a clear definition that is understood and requested by the server, thus I'll replace it with:
|
||||
WidevineCencHeader Pssh = 1;
|
||||
LicenseType LicenseType = 2; // unfortunately the LicenseType symbols are not present, acceptable value seems to only be 1
|
||||
bytes RequestId = 3;
|
||||
}
|
||||
message WebM {
|
||||
bytes Header = 1; // identical to CENC, aside from PSSH and the parent field number used
|
||||
LicenseType LicenseType = 2;
|
||||
bytes RequestId = 3;
|
||||
}
|
||||
message ExistingLicense {
|
||||
LicenseIdentification LicenseId = 1;
|
||||
uint32 SecondsSinceStarted = 2;
|
||||
uint32 SecondsSinceLastPlayed = 3;
|
||||
bytes SessionUsageTableEntry = 4;
|
||||
}
|
||||
CENC CencId = 1;
|
||||
WebM WebmId = 2;
|
||||
ExistingLicense License = 3;
|
||||
}
|
||||
enum RequestType {
|
||||
DUMMY_REQ_TYPE = 0; // dummy, added to satisfy proto3
|
||||
NEW = 1;
|
||||
RENEWAL = 2;
|
||||
RELEASE = 3;
|
||||
}
|
||||
ClientIdentification ClientId = 1;
|
||||
ContentIdentification ContentId = 2;
|
||||
RequestType Type = 3;
|
||||
uint32 RequestTime = 4;
|
||||
bytes KeyControlNonceDeprecated = 5;
|
||||
ProtocolVersion ProtocolVersion = 6; // lacking symbols for this
|
||||
uint32 KeyControlNonce = 7;
|
||||
EncryptedClientIdentification EncryptedClientId = 8;
|
||||
}
|
||||
|
||||
message ProvisionedDeviceInfo {
|
||||
enum WvSecurityLevel {
|
||||
LEVEL_UNSPECIFIED = 0;
|
||||
LEVEL_1 = 1;
|
||||
LEVEL_2 = 2;
|
||||
LEVEL_3 = 3;
|
||||
}
|
||||
uint32 SystemId = 1;
|
||||
string Soc = 2;
|
||||
string Manufacturer = 3;
|
||||
string Model = 4;
|
||||
string DeviceType = 5;
|
||||
uint32 ModelYear = 6;
|
||||
WvSecurityLevel SecurityLevel = 7;
|
||||
uint32 TestDevice = 8; // bool?
|
||||
}
|
||||
|
||||
|
||||
// todo: fill
|
||||
message ProvisioningOptions {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message ProvisioningRequest {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message ProvisioningResponse {
|
||||
}
|
||||
|
||||
message RemoteAttestation {
|
||||
EncryptedClientIdentification Certificate = 1;
|
||||
string Salt = 2;
|
||||
string Signature = 3;
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message SessionInit {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message SessionState {
|
||||
}
|
||||
|
||||
// todo: fill
|
||||
message SignedCertificateStatusList {
|
||||
}
|
||||
|
||||
message SignedDeviceCertificate {
|
||||
|
||||
//bytes DeviceCertificate = 1; // again, they use a buffer where it's supposed to be a message, so we'll replace it with what it really is:
|
||||
DeviceCertificate _DeviceCertificate = 1; // how should we deal with duped names? will have to look at proto docs later
|
||||
bytes Signature = 2;
|
||||
SignedDeviceCertificate Signer = 3;
|
||||
}
|
||||
|
||||
|
||||
// todo: fill
|
||||
message SignedProvisioningMessage {
|
||||
}
|
||||
|
||||
// the root of all messages, from either server or client
|
||||
message SignedMessage {
|
||||
enum MessageType {
|
||||
DUMMY_MSG_TYPE = 0; // dummy, added to satisfy proto3
|
||||
LICENSE_REQUEST = 1;
|
||||
LICENSE = 2;
|
||||
ERROR_RESPONSE = 3;
|
||||
SERVICE_CERTIFICATE_REQUEST = 4;
|
||||
SERVICE_CERTIFICATE = 5;
|
||||
}
|
||||
MessageType Type = 1; // has in incorrect overlap with License_KeyContainer_SecurityLevel
|
||||
bytes Msg = 2; // this has to be casted dynamically, to LicenseRequest, License or LicenseError (? unconfirmed), for Request, no other fields but Type need to be present
|
||||
// for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
|
||||
bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
|
||||
bytes SessionKey = 4; // often RSA wrapped for licenses
|
||||
RemoteAttestation RemoteAttestation = 5;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// This message is copied from google's docs, not reversed:
|
||||
message WidevineCencHeader {
|
||||
enum Algorithm {
|
||||
UNENCRYPTED = 0;
|
||||
AESCTR = 1;
|
||||
};
|
||||
Algorithm algorithm = 1;
|
||||
repeated bytes key_id = 2;
|
||||
|
||||
// Content provider name.
|
||||
string provider = 3;
|
||||
|
||||
// A content identifier, specified by content provider.
|
||||
bytes content_id = 4;
|
||||
|
||||
// Track type. Acceptable values are SD, HD and AUDIO. Used to
|
||||
// differentiate content keys used by an asset.
|
||||
string track_type_deprecated = 5;
|
||||
|
||||
// The name of a registered policy to be used for this asset.
|
||||
string policy = 6;
|
||||
|
||||
// Crypto period index, for media using key rotation.
|
||||
uint32 crypto_period_index = 7;
|
||||
|
||||
// Optional protected context for group content. The grouped_license is a
|
||||
// serialized SignedMessage.
|
||||
bytes grouped_license = 8;
|
||||
|
||||
// Protection scheme identifying the encryption algorithm.
|
||||
// Represented as one of the following 4CC values:
|
||||
// 'cenc' (AESCTR), 'cbc1' (AESCBC),
|
||||
// 'cens' (AESCTR subsample), 'cbcs' (AESCBC subsample).
|
||||
uint32 protection_scheme = 9;
|
||||
|
||||
// Optional. For media using key rotation, this represents the duration
|
||||
// of each crypto period in seconds.
|
||||
uint32 crypto_period_seconds = 10;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// from here on, it's just for testing, these messages don't exist in the binaries, I'm adding them to avoid detecting type programmatically
|
||||
// Statically-typed variant of the generic signed wrapper whose Msg payload is
// always a LicenseRequest (avoids detecting the payload type at runtime).
message SignedLicenseRequest {
  enum MessageType {
    DUMMY_MSG_TYPE = 0; // dummy, added to satisfy proto3
    LICENSE_REQUEST = 1;
    LICENSE = 2;
    ERROR_RESPONSE = 3;
    SERVICE_CERTIFICATE_REQUEST = 4;
    SERVICE_CERTIFICATE = 5;
  }
  MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
  LicenseRequest Msg = 2; // for a request, no other fields but Type need to be present
  // for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
  bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
  bytes SessionKey = 4; // often RSA wrapped for licenses
  RemoteAttestation RemoteAttestation = 5;
}
|
||||
|
||||
// Statically-typed variant of the generic signed wrapper whose Msg payload is
// always a License (avoids detecting the payload type at runtime).
message SignedLicense {
  enum MessageType {
    DUMMY_MSG_TYPE = 0; // dummy, added to satisfy proto3
    LICENSE_REQUEST = 1;
    LICENSE = 2;
    ERROR_RESPONSE = 3;
    SERVICE_CERTIFICATE_REQUEST = 4;
    SERVICE_CERTIFICATE = 5;
  }
  MessageType Type = 1; // has an incorrect overlap with License_KeyContainer_SecurityLevel
  License Msg = 2; // for a response, carries the License payload
  // for SERVICE_CERTIFICATE, only Type and Msg are present, and it's just a DeviceCertificate with CertificateType set to SERVICE
  bytes Signature = 3; // might be different type of signatures (ex. RSA vs AES CMAC(??), unconfirmed for now)
  bytes SessionKey = 4; // often RSA wrapped for licenses
  RemoteAttestation RemoteAttestation = 5;
}
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,14 @@
|
|||
import binascii
|
||||
|
||||
class Key:
    """A single content key returned by a license.

    Attributes:
        kid: key ID this key decrypts (stored as given by the caller).
        type: license key-type string, e.g. "CONTENT" or "OPERATOR_SESSION".
        key: raw key bytes.
        permissions: permission strings; only shown in repr() for
            OPERATOR_SESSION keys.
    """

    def __init__(self, kid, type, key, permissions=None):
        # BUGFIX: the previous default was a shared mutable list ([]); every
        # instance created without explicit permissions aliased the same list.
        # Use None as the sentinel and build a fresh list per instance.
        self.kid = kid
        self.type = type
        self.key = key
        self.permissions = [] if permissions is None else permissions

    def __repr__(self):
        # Permissions are only meaningful for OPERATOR_SESSION keys.
        if self.type == "OPERATOR_SESSION":
            return "key(kid={}, type={}, key={}, permissions={})".format(self.kid, self.type, binascii.hexlify(self.key), self.permissions)
        else:
            return "key(kid={}, type={}, key={})".format(self.kid, self.type, binascii.hexlify(self.key))
|
|
@ -0,0 +1,18 @@
|
|||
class Session:
    """Per-request DRM session state.

    Holds the caller-supplied init data plus every artifact produced while
    the license exchange progresses (request, response, derived keys, ...).
    """

    def __init__(self, session_id, init_data, device_config, offline):
        # Inputs supplied by the caller.
        self.session_id = session_id
        self.init_data = init_data
        self.device_config = device_config
        self.offline = offline
        # Crypto material, populated as the handshake advances.
        self.device_key = None
        self.session_key = None
        self.derived_keys = dict.fromkeys(('enc', 'auth_1', 'auth_2'))
        # License-exchange artifacts.
        self.license_request = None
        self.license = None
        self.service_certificate = None
        self.privacy_mode = False
        self.keys = []
|
|
@ -0,0 +1,102 @@
|
|||
# Varint decoder selection: prefer protobuf's implementation when installed,
# otherwise fall back to a self-contained LEB128 decoder with the same API.
try:
    from google.protobuf.internal.decoder import _DecodeVarint as _di  # this was tested to work with protobuf 3, but it's an internal API (any varint decoder might work)
except ImportError:
    # this is generic and does not depend on pb internals, however it will decode "larger" possible numbers than pb decoder which has them fixed
    def LEB128_decode(buffer, pos, limit = 64):
        # Decode one base-128 varint from `buffer` starting at `pos`.
        # Returns (value, next_position), mirroring _DecodeVarint's contract.
        result = 0
        shift = 0
        while True:
            b = buffer[pos]
            pos += 1
            result |= ((b & 0x7F) << shift)
            if not (b & 0x80):
                # High bit clear: this was the final byte of the varint.
                return (result, pos)
            shift += 7
            if shift > limit:
                # Guard against malformed/endless continuation bytes.
                raise Exception("integer too large, shift: {}".format(shift))
    _di = LEB128_decode
|
||||
|
||||
|
||||
class FromFileMixin:
    """Mixin adding a `from_file` alternate constructor to byte-consuming classes."""

    @classmethod
    def from_file(cls, filename):
        """Build an instance from the raw bytes of *filename*."""
        with open(filename, "rb") as handle:
            raw = handle.read()
        return cls(raw)
|
||||
|
||||
# the signatures use a format internally similar to
|
||||
# protobuf's encoding, but without wire types
|
||||
# the signatures use a format internally similar to
# protobuf's encoding, but without wire types
class VariableReader(FromFileMixin):
    """Sequential reader for a protobuf-like varint/bytes encoding."""

    def __init__(self, buf):
        self.buf = buf
        self.size = len(buf)
        self.pos = 0

    def read_int(self):
        """Read a variable length integer and advance the cursor."""
        # The varint decoder (_di) handles out-of-range input itself.
        value, self.pos = _di(self.buf, self.pos)
        return value

    def read_bytes_raw(self, size):
        """Read exactly `size` raw bytes and advance the cursor."""
        start = self.pos
        self.pos = start + size
        return self.buf[start:self.pos]

    def read_bytes(self):
        """Read a length-prefixed bytes object."""
        return self.read_bytes_raw(self.read_int())

    def is_end(self):
        """True once the whole buffer has been consumed."""
        return self.pos == self.size
|
||||
|
||||
|
||||
class TaggedReader(VariableReader):
    """Tagged reader, needed for implementing a WideVine signature reader."""

    def read_tag(self):
        """Read one (tag, payload) pair."""
        tag = self.read_int()
        return (tag, self.read_bytes())

    def read_all_tags(self, max_tag=3):
        """Read (tag, payload) pairs until the buffer is exhausted.

        Raises IndexError when a tag exceeds `max_tag`.
        """
        tags = {}
        while not self.is_end():
            tag, payload = self.read_tag()
            if tag > max_tag:
                raise IndexError("tag out of bound: got {}, max {}".format(tag, max_tag))
            tags[tag] = payload
        return tags
|
||||
|
||||
class WideVineSignatureReader(FromFileMixin):
    """Parses a widevine .sig signature file."""

    # Tag numbers used inside the signature container.
    SIGNER_TAG = 1
    SIGNATURE_TAG = 2
    ISMAINEXE_TAG = 3

    def __init__(self, buf):
        # Layout: a leading varint (format version, must be 0) followed by
        # tagged, length-prefixed payloads.
        reader = TaggedReader(buf)
        self.version = reader.read_int()
        if (self.version != 0):
            raise Exception("Unsupported signature format version {}".format(self.version))
        self.tags = reader.read_all_tags()

        # Required payloads; a missing tag raises KeyError here.
        self.signer = self.tags[self.SIGNER_TAG]
        self.signature = self.tags[self.SIGNATURE_TAG]

        # The 'ismainexe' payload must be a single 0x00/0x01 byte.
        extra = self.tags[self.ISMAINEXE_TAG]
        if (len(extra) != 1 or (extra[0] > 1)):
            raise Exception("Unexpected 'ismainexe' field value (not '\\x00' or '\\x01'), please check: {0}".format(extra))

        self.mainexe = bool(extra[0])

    @classmethod
    def get_tags(cls, filename):
        """Return a dictionary of each tag in the signature file"""
        return cls.from_file(filename).tags
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,27 @@
|
|||
import json, sys, time
|
||||
import pywidevine.clients.blim.config as blim_cfg
|
||||
from os.path import join
|
||||
|
||||
BLIMLOGINDATA_FILE = join(blim_cfg.COOKIES_FOLDER, 'blim_login_data.json')
|
||||
|
||||
login_cfg = {
|
||||
'email': 'teste@blim.com',
|
||||
'password': 'teste1234'
|
||||
}
|
||||
|
||||
def login(SESSION, save_login=False):
    """Authenticate against the blim API using the module-level credentials.

    Args:
        SESSION: a requests.Session used for the POST (returned unchanged).
        save_login: when True, persist the obtained keys to BLIMLOGINDATA_FILE.

    Returns:
        (SESSION, costumer_key, access_key_secret); exits the process when
        the API reports a failed login (empty 'data' payload).
    """
    post_data = {"email": login_cfg['email'], "password": login_cfg['password'], "remember": True, "clientId": 5}
    login_resp = SESSION.post(url=blim_cfg.ENDPOINTS['login'], json=post_data)
    # Decode the response body once instead of re-parsing it on every access.
    body = login_resp.json()
    if body['data'] == []:
        # Failure is signalled by an empty data list plus a message entry.
        print(body['messages'][0]['value'])
        sys.exit(1)

    costumer_key = body['data']['sessionId']
    access_key_secret = body['data']['accessToken']
    login_data = {'COSTUMER_KEY': costumer_key, 'SECRET_KEY': access_key_secret}
    if save_login:
        # The `with` block already closes the file; the old explicit
        # f.close() inside it was redundant.
        with open(BLIMLOGINDATA_FILE, 'w', encoding='utf-8') as f:
            f.write(json.dumps(login_data, indent=4))

    return SESSION, costumer_key, access_key_secret
|
|
@ -0,0 +1,59 @@
|
|||
from shutil import which
|
||||
from os.path import dirname, realpath, join
|
||||
from os import pathsep, environ
|
||||
|
||||
# blim API endpoints used throughout the client.
ENDPOINTS = {
    'login': 'https://api.blim.com/account/login',
    'seasons': 'https://api.blim.com/asset/',
    'content': 'https://api.blim.com/play/resume/',
    'config': 'https://www.blim.com/secure/play/resume/configuration?config_token=portal-config'
}

# NOTE(review): this is a *set* of 'kid:key' hex strings, not a dict mapping
# kid -> key — confirm downstream code expects that shape.
protection_keys = {
    '094af042a17556c5b28a176deffdd4a7:14319c175eb145071fe189d2b1da8634',
    '4ae10c2357e250e088bb8a5ab044dd50:e7f47e2b948e9222cf4d24b51881ec04',
    'b6e16839eebd4ff6ab768d482d8d2b6a:ad6c675e0810741538f7f2f0b4099d9e'
}

# Pre-captured init segments hosted on an external CDN.
# NOTE(review): these attachment links can expire — verify they still resolve.
init_files = {
    '1080p': 'https://cdn.discordapp.com/attachments/686581369249333291/857062526856200252/video_init_1920x1080.bin',
    '480p': 'https://cdn.discordapp.com/attachments/686581369249333291/857062525421092944/video_640x480.bin',
    'audio': 'https://cdn.discordapp.com/attachments/686581369249333291/857104327742193735/audio_init.bin'
}

# NOTE(review): realpath('blimtv') resolves against the current working
# directory, not this file's location — dirname(realpath(__file__)) may have
# been intended.
SCRIPT_PATH = dirname(realpath('blimtv'))

BINARIES_FOLDER = join(SCRIPT_PATH, 'binaries')
COOKIES_FOLDER = join(SCRIPT_PATH, 'cookies')

# External tool names, resolved to absolute paths below via shutil.which().
MP4DECRYPT_BINARY = 'mp4decrypt'
MP4DUMP_BINARY = 'mp4dump'
MKVMERGE_BINARY = 'mkvmerge'
FFMPEG_BINARY = 'ffmpeg'
ARIA2C_BINARY = 'aria2c'

# Add binaries folder to PATH as the first item
environ['PATH'] = pathsep.join([BINARIES_FOLDER, environ['PATH']])

# Resolved absolute paths (None when a tool is not found on PATH).
MP4DECRYPT = which(MP4DECRYPT_BINARY)
MP4DUMP = which(MP4DUMP_BINARY)
MKVMERGE = which(MKVMERGE_BINARY)
FFMPEG = which(FFMPEG_BINARY)
ARIA2C = which(ARIA2C_BINARY)
|
||||
|
||||
class PrDownloaderConfig(object):
    """Plain value object bundling everything PrDownloader needs for one track."""

    def __init__(self, ism, base_url, output_file, bitrate, init_url, file_type):
        # Store every constructor argument as a same-named attribute.
        for attr, value in (('ism', ism), ('base_url', base_url),
                            ('output_file', output_file), ('bitrate', bitrate),
                            ('init_url', init_url), ('file_type', file_type)):
            setattr(self, attr, value)
|
||||
|
||||
class WvDownloaderConfig(object):
    """Plain value object describing one DASH track for WvDownloader."""

    def __init__(self, mpd, base_url, output_file, format_id, file_type):
        # Attach every constructor argument under the same name.
        vars(self).update(mpd=mpd, base_url=base_url, output_file=output_file,
                          format_id=format_id, file_type=file_type)
|
|
@ -0,0 +1,117 @@
|
|||
import threading, isodate
|
||||
import requests
|
||||
import math
|
||||
import urllib.parse
|
||||
|
||||
from requests.sessions import session
|
||||
from tqdm import tqdm
|
||||
from queue import Queue
|
||||
|
||||
dlthreads = 24
|
||||
|
||||
class PrDownloader(object):
    """Multi-threaded segment downloader for SmoothStreaming (PlayReady) tracks."""

    def __init__(self, config):
        # config is a PrDownloaderConfig; mirror its fields for convenience.
        self.ism = config.ism
        self.output_file = config.output_file
        self.bitrate = config.bitrate
        self.base_url = config.base_url
        self.init_url = config.init_url
        self.config = config

    def process_url_templace(self, template, representation_id, bandwidth, time, number):
        # DASH-style $...$ template expansion ($RepresentationID$, $Number$,
        # printf-style $Number%05d$, $Bandwidth$, $Time$).
        # NOTE(review): if representation_id is None, `result` is never bound
        # and the next access raises UnboundLocalError. This method is not
        # called anywhere inside this class (the ISM path uses str.format).
        if representation_id is not None: result = template.replace('$RepresentationID$', representation_id)
        if number is not None:
            nstart = result.find('$Number')
            if nstart >= 0:
                nend = result.find('$', nstart+1)
                if nend >= 0:
                    var = result[nstart+1 : nend]
                    if 'Number%' in var:
                        # printf-style padding, e.g. $Number%05d$.
                        value = var[6:] % (int(number))
                    else:
                        value = number
                    result = result.replace('$'+var+'$', value)
        if bandwidth is not None: result = result.replace('$Bandwidth$', bandwidth)
        if time is not None: result = result.replace('$Time$', time)
        # Unescape $$ and strip relative path components.
        result = result.replace('$$', '$').replace('../', '')
        return result

    def generate_segments(self):
        # Pick the matching StreamIndex, then expand it into segment URLs.
        quality_level = self.get_quality_level()
        return self.get_segments(quality_level)

    def get_segments(self, stream_index):
        """Expand an ISM StreamIndex into the ordered list of segment URLs."""
        urls = []
        # Init segment goes first.
        urls.append(self.init_url)
        t = 0  # running start time in manifest ticks
        for seg in stream_index["c"]:
            if '@t' in seg:
                # An explicit start time resets the running clock.
                t = seg['@t']
            # '@r' is a repeat count: one entry stands for @r+1 segments.
            for i in range(int(seg.get('@r', 0)) + 1):
                # The ISM @Url template carries {bitrate} and {start time}
                # placeholders (str.format accepts the key with a space).
                path = stream_index['@Url'].format(**{
                    'bitrate': self.bitrate,
                    'start time': t})
                url = urllib.parse.urljoin(self.base_url, path)
                urls.append(url)
                t += int(seg['@d'])
        return urls

    def get_quality_level(self):
        # First StreamIndex whose @Type contains the requested file_type
        # (e.g. 'video'/'audio'); raises IndexError when none match.
        X = [item for (i, item) in enumerate(self.ism['SmoothStreamingMedia']['StreamIndex']) if self.config.file_type in item.get('@Type')][0]
        return X

    def run(self):
        """Download every segment with a worker pool and write them in order."""
        urls = self.generate_segments()
        work_q = Queue()
        result_q = Queue()

        print('\n' + self.output_file)
        pool = [WorkerThread(work_q=work_q, result_q=result_q) for i in range(dlthreads)]
        for thread in pool:
            thread.start()

        # Queue (sequence, url) jobs for the workers.
        work_count = 0
        for seg_url in urls:
            work_q.put((work_count, seg_url))
            work_count += 1
        results = []

        # Collect exactly one result per queued job (arrival order arbitrary).
        for _ in tqdm(range(work_count)):
            results.append(result_q.get())
        outfile = open(self.output_file , 'wb+')
        # Restore segment order by sequence number before writing.
        sortedr = sorted(results, key=lambda v: v[0])
        for r in sortedr:
            outfile.write(r[1])
        outfile.close()
        del results
        print('Done!')
|
||||
|
||||
class Downloader:
    """Thin wrapper around one requests.Session used to fetch media segments."""

    def __init__(self):
        self.session = requests.Session()

    def DownloadSegment(self, url):
        """Fetch `url` and return the raw response body as bytes."""
        response = self.session.get(url, stream=True)
        # Let urllib3 undo any transfer encoding before we read the stream.
        response.raw.decode_content = True
        return response.raw.read()
|
||||
|
||||
class WorkerThread(threading.Thread):
    """Pulls (sequence, url) jobs from work_q and pushes (sequence, bytes) to result_q."""

    def __init__(self, work_q, result_q):
        super(WorkerThread, self).__init__()
        self.work_q = work_q
        self.result_q = result_q
        # Set by join() to ask the worker loop to exit.
        self.stoprequest = threading.Event()
        self.downloader = Downloader()

    def run(self):
        # is_set() replaces the deprecated camelCase isSet() alias.
        while not self.stoprequest.is_set():
            try:
                (seq, url) = self.work_q.get(True, 0.05)
                self.result_q.put((seq, self.downloader.DownloadSegment(url)))
            except Exception:
                # queue.Empty on an idle poll, or a failed download; either
                # way keep the worker alive (best-effort, as before) — but
                # unlike the old bare `except:`, no longer swallow
                # KeyboardInterrupt/SystemExit.
                continue

    def join(self, timeout=None):
        """Signal the worker to stop, then join it."""
        self.stoprequest.set()
        super(WorkerThread, self).join(timeout)
|
|
@ -0,0 +1,155 @@
|
|||
import threading, isodate
|
||||
import requests
|
||||
import math
|
||||
|
||||
from requests.sessions import session
|
||||
from tqdm import tqdm
|
||||
from queue import Queue
|
||||
|
||||
dlthreads = 24
|
||||
|
||||
class WvDownloader(object):
    """Multi-threaded segment downloader for DASH (Widevine) tracks."""

    def __init__(self, config):
        # config is a WvDownloaderConfig; mirror its fields for convenience.
        self.mpd = config.mpd
        self.output_file = config.output_file
        self.mimetype = config.file_type
        # NOTE(review): methods below read self.config.format_id, not
        # self.formatId — this attribute appears unused.
        self.formatId = config.format_id
        self.config = config

    def process_url_templace(self, template, representation_id, bandwidth, time, number):
        # DASH SegmentTemplate $...$ expansion ($RepresentationID$, $Number$,
        # printf-style $Number%05d$, $Bandwidth$, $Time$).
        # NOTE(review): `result` is unbound if representation_id is None; all
        # call sites in this class do pass one.
        if representation_id is not None: result = template.replace('$RepresentationID$', representation_id)
        if number is not None:
            nstart = result.find('$Number')
            if nstart >= 0:
                nend = result.find('$', nstart+1)
                if nend >= 0:
                    var = result[nstart+1 : nend]
                    if 'Number%' in var:
                        # printf-style padding, e.g. $Number%05d$.
                        value = var[6:] % (int(number))
                    else:
                        value = number
                    result = result.replace('$'+var+'$', value)
        if bandwidth is not None: result = result.replace('$Bandwidth$', bandwidth)
        if time is not None: result = result.replace('$Time$', time)
        # Unescape $$ and strip relative path components.
        result = result.replace('$$', '$').replace('../', '')
        return result

    def generate_segments(self):
        # Locate the track's SegmentTemplate, then expand it into URLs.
        segment_template = self.get_segment_template()
        return self.get_segments(segment_template)

    def get_segments(self, segment_template):
        """Expand a SegmentTemplate into the ordered list of segment URLs."""
        urls = []
        # Init segment goes first.
        urls.append(self.config.base_url + segment_template['@initialization'].replace('$RepresentationID$', self.config.format_id))
        print(urls)  # NOTE(review): debug output left in
        try:
            # Fixed-duration addressing: @duration/@timescale + @startNumber.
            current_number = int(segment_template.get("@startNumber", 0))
            period_duration = self.get_duration()
            segment_duration = int(segment_template["@duration"]) / int(segment_template["@timescale"])
            total_segments = math.ceil(period_duration / segment_duration)
            for _ in range(current_number, current_number + total_segments):
                urls.append(self.config.base_url + self.process_url_templace(segment_template['@media'],
                            representation_id=self.config.format_id,
                            bandwidth=None, time="0", number=str(current_number)))
                current_number += 1
        except KeyError:
            # No @duration: fall back to explicit SegmentTimeline addressing.
            current_number = 0
            current_time = 0
            for seg in segment_template["SegmentTimeline"]["S"]:
                if '@t' in seg:
                    # Explicit start time resets the running clock.
                    current_time = seg['@t']
                # '@r' repeats the entry @r+1 times.
                for i in range(int(seg.get('@r', 0)) + 1):
                    urls.append(self.config.base_url + self.process_url_templace(segment_template['@media'],
                                representation_id=self.config.format_id,
                                bandwidth=None, time=str(current_time), number=str(current_number)))
                    current_number += 1
                    current_time += seg['@d']
        return urls

    def get_duration(self):
        # ISO-8601 duration (e.g. 'PT1H2M3S') -> seconds.
        media_duration = self.mpd["MPD"]["@mediaPresentationDuration"]
        return isodate.parse_duration(media_duration).total_seconds()

    def get_segment_template(self):
        """Find the SegmentTemplate for the configured mime type."""
        tracks = self.mpd['MPD']['Period']['AdaptationSet']

        segment_template = []
        if self.mimetype == "video/mp4":
            # NOTE: the last matching Representation's template wins.
            for video_track in tracks:
                if video_track["@mimeType"] == self.mimetype:
                    for v in video_track["Representation"]:
                        segment_template = v["SegmentTemplate"]

        if self.mimetype == "audio/mp4":
            for audio_track in tracks:
                if audio_track["@mimeType"] == self.mimetype:
                    try:
                        # The template may live on the AdaptationSet itself...
                        segment_template = audio_track["SegmentTemplate"]
                    except (KeyError, TypeError):
                        # ...or on its Representation(s).
                        for a in self.list_representation(audio_track):
                            segment_template = a["SegmentTemplate"]

        return segment_template

    def list_representation(self, x):
        # Wrap a lone Representation dict in a list for uniform iteration.
        if isinstance(x['Representation'], list):
            X = x['Representation']
        else:
            X = [x['Representation']]
        return X

    def run(self):
        """Download every segment with a worker pool and write them in order."""
        urls = self.generate_segments()
        work_q = Queue()
        result_q = Queue()

        print('\n' + self.output_file)
        pool = [WorkerThread(work_q=work_q, result_q=result_q) for i in range(dlthreads)]
        for thread in pool:
            thread.start()

        # Queue (sequence, url) jobs for the workers.
        work_count = 0
        for seg_url in urls:
            work_q.put((work_count, seg_url))
            work_count += 1
        results = []

        # Collect exactly one result per queued job (arrival order arbitrary).
        for _ in tqdm(range(work_count)):
            results.append(result_q.get())
        outfile = open(self.output_file , 'wb+')
        # Restore segment order by sequence number before writing.
        sortedr = sorted(results, key=lambda v: v[0])
        for r in sortedr:
            outfile.write(r[1])
        outfile.close()
        del results
        print('Done!')
|
||||
|
||||
class Downloader:
    """Thin wrapper around one requests.Session used to fetch media segments."""

    def __init__(self):
        self.session = requests.Session()

    def DownloadSegment(self, url):
        """Fetch `url` and return the raw response body as bytes."""
        response = self.session.get(url, stream=True)
        # Let urllib3 undo any transfer encoding before we read the stream.
        response.raw.decode_content = True
        return response.raw.read()
|
||||
|
||||
class WorkerThread(threading.Thread):
    """Pulls (sequence, url) jobs from work_q and pushes (sequence, bytes) to result_q."""

    def __init__(self, work_q, result_q):
        super(WorkerThread, self).__init__()
        self.work_q = work_q
        self.result_q = result_q
        # Set by join() to ask the worker loop to exit.
        self.stoprequest = threading.Event()
        self.downloader = Downloader()

    def run(self):
        # is_set() replaces the deprecated camelCase isSet() alias.
        while not self.stoprequest.is_set():
            try:
                (seq, url) = self.work_q.get(True, 0.05)
                self.result_q.put((seq, self.downloader.DownloadSegment(url)))
            except Exception:
                # queue.Empty on an idle poll, or a failed download; either
                # way keep the worker alive (best-effort, as before) — but
                # unlike the old bare `except:`, no longer swallow
                # KeyboardInterrupt/SystemExit.
                continue

    def join(self, timeout=None):
        """Signal the worker to stop, then join it."""
        self.stoprequest.set()
        super(WorkerThread, self).join(timeout)
|
|
@ -0,0 +1,104 @@
|
|||
import isodate
|
||||
|
||||
def get_mpd_list(mpd):
    """Flatten a parsed DASH MPD (dict form) into per-track listings.

    Returns (duration_seconds, video_list, audio_list, subs_list), each list
    holding plain dicts describing one Representation.
    """
    def get_height(width, height):
        # Map well-known widths to canonical heights; otherwise keep the
        # height reported by the manifest.
        if width == '1920':
            return '1080'
        elif width in ('1280', '1248'):
            return '720'
        else:
            return height

    length = isodate.parse_duration(mpd['MPD']['@mediaPresentationDuration']).total_seconds()
    period = mpd['MPD']['Period']
    base_url = period['BaseURL']
    tracks = period['AdaptationSet']

    video_list = []
    for video_tracks in tracks:
        if video_tracks['@mimeType'] == 'video/mp4':
            for x in video_tracks['Representation']:
                try:
                    # @codecs may sit on the Representation or its AdaptationSet.
                    codecs = x['@codecs']
                except KeyError:
                    codecs = video_tracks['@codecs']

                videoDict = {
                    'Height':get_height(x['@width'], x['@height']),
                    'Width':x['@width'],
                    'Bandwidth':x['@bandwidth'],
                    'ID':x['@id'],
                    'Codec':codecs}
                video_list.append(videoDict)

    def list_representation(x):
        # Wrap a lone Representation dict in a list for uniform iteration.
        if isinstance(x['Representation'], list):
            X = x['Representation']
        else:
            X = [x['Representation']]
        return X

    def replace_code_lang(x):
        # NOTE(review): these chained replaces map both 'es' and 'en' to
        # 'es-la' and can re-apply to already-tagged values (e.g. 'es-la'
        # becomes 'es-la-la') — confirm the intended language mapping.
        X = x.replace('es', 'es-la').replace('en', 'es-la')
        return X

    audio_list = []
    for audio_tracks in tracks:
        if audio_tracks['@mimeType'] == 'audio/mp4':
            for x in list_representation(audio_tracks):
                try:
                    # Same AdaptationSet fallback as for video codecs.
                    codecs = x['@codecs']
                except KeyError:
                    codecs = audio_tracks['@codecs']
                audio_dict = {
                    'Bandwidth':x['@bandwidth'],
                    'ID':x['@id'],
                    'Language':audio_tracks["@lang"],
                    'Codec':codecs}
                audio_list.append(audio_dict)

    subs_list = []
    for subs_tracks in tracks:
        if subs_tracks['@mimeType'] == 'text/vtt':
            for x in list_representation(subs_tracks):
                subs_dict = {
                    'ID':x['@id'],
                    'Language':replace_code_lang(subs_tracks["@lang"]),
                    'Codec':subs_tracks['@mimeType'],
                    'File_URL':base_url + x['BaseURL'].replace('../', '')}
                subs_list.append(subs_dict)

    return length, video_list, audio_list, subs_list
|
||||
|
||||
def get_ism_list(ism):
    """Flatten a parsed SmoothStreaming (ISM) manifest into track listings.

    Returns (duration_seconds, video_list, audio_list, []) — the final
    element is always empty, as no subtitle tracks are extracted here.
    """
    media = ism['SmoothStreamingMedia']
    # @Duration is converted to seconds by slicing off its last 7 digits
    # (presumably 100ns ticks — TODO confirm against real manifests).
    length = float(media['@Duration'][:-7])
    tracks = media["StreamIndex"]

    def normalize_lang(audio_tag):
        # Presumably AudioTag '255' marks Latin-American Spanish here —
        # verify against the service's manifests.
        return audio_tag.replace('255', 'es-la')

    video_list = [
        {'Height': level['@MaxHeight'],
         'Width': level['@MaxWidth'],
         'ID': '0',
         'Bandwidth': level['@Bitrate'],
         'Codec': level["@FourCC"]}
        for stream in tracks if stream['@Type'] == 'video'
        for level in stream['QualityLevel']
    ]

    audio_list = [
        {'Bandwidth': level['@Bitrate'],
         'ID': '0',
         'Language': normalize_lang(level["@AudioTag"]),
         'Codec': level["@FourCC"]}
        for stream in tracks if stream['@Type'] == 'audio'
        for level in stream["QualityLevel"]
    ]

    return length, video_list, audio_list, []
|
|
@ -0,0 +1,42 @@
|
|||
import re
|
||||
from unidecode import unidecode
|
||||
|
||||
def get_release_tag(default_filename, vcodec, video_height, acodec, channels, bitrate, module, tag, isDual):
    """Build a scene-style release name, e.g. 'Title.1080p.MOD.WEB-DL.DDP5.1.H.264-TAG'.

    Args:
        default_filename: raw title; punctuation is stripped, spaces become
            dots and the result is ASCII-transliterated.
        vcodec: video codec id ('avc...', 'hvc...', 'dvh...').
        video_height: vertical resolution used for the '<height>p' token.
        acodec: audio codec id ('mp4a...', 'ac-3', 'ec-3').
        channels: channel count as a string ('2' or '6').
        bitrate: audio bitrate; ec-3 above 700000 is labelled Atmos.
        module: service/module label embedded in the name.
        tag: group tag appended after the final dash.
        isDual: when True the audio part is just 'DUAL'.
    """
    video_codec = ''
    if 'avc' in vcodec:
        video_codec = 'H.264'
    if 'hvc' in vcodec:
        video_codec = 'H.265'
    elif 'dvh' in vcodec:
        video_codec = 'HDR'

    if not isDual:
        audio_codec = ''
        if 'mp4a' in acodec:
            audio_codec = 'AAC'
        if acodec == 'ac-3':
            audio_codec = 'DD'
        if acodec == 'ec-3':
            # BUGFIX: the old `elif acodec == 'ec-3' and bitrate > 700000`
            # branch was unreachable — the preceding `if` already matched
            # every ec-3 stream, so nothing was ever labelled Atmos.
            audio_codec = 'Atmos' if bitrate > 700000 else 'DDP'

        audio_channels = ''
        if channels == '2':
            audio_channels = '2.0'
        elif channels == '6':
            audio_channels = '5.1'
        audio_format = audio_codec + audio_channels
    else:
        audio_format = 'DUAL'

    # Sanitize the title: '&' -> '.and.', drop punctuation, spaces -> dots,
    # collapse dot runs, then transliterate to plain ASCII.
    default_filename = default_filename.replace('&', '.and.')
    default_filename = re.sub(r'[]!"#$%\'()*+,:;<=>?@\\^_`{|}~[-]', '', default_filename)
    default_filename = default_filename.replace(' ', '.')
    default_filename = re.sub(r'\.{2,}', '.', default_filename)
    default_filename = unidecode(default_filename)

    output_name = '{}.{}p.{}.WEB-DL.{}.{}-{}'.format(default_filename, video_height, str(module), audio_format, video_codec, tag)
    return output_name
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,106 @@
|
|||
import base64, time, requests, os, json
|
||||
import pywidevine.clients.hbomax.config as hmaxcfg
|
||||
from os.path import join
|
||||
|
||||
SESSION = requests.Session()
|
||||
HMAXTOKEN_FILE = join(hmaxcfg.COOKIES_FOLDER, 'hmax_login_data.json')
|
||||
|
||||
|
||||
login_config = {
|
||||
'username': 'rivas909@me.com',
|
||||
'password': 'NoCambieselPass.12345'
|
||||
}
|
||||
|
||||
def login(SESSION, login_endpoint, content_url, save_login=True):
    """Full hbomax login flow.

    Steps: anonymous client token -> username/password grant -> profile
    listing -> profile-scoped refresh token. Optionally persists the final
    refresh token (plus a timestamp) to HMAXTOKEN_FILE.

    Returns the access_token from the credential-grant response.
    """
    def get_free_token(token_url):
        # Anonymous client_credentials token used to bootstrap the flow.
        token_data = hmaxcfg.get_token_info()
        free_token = requests.post(url=token_url, headers=token_data['headers'], json=token_data['data'])
        if int(free_token.status_code) != 200:
            print(free_token.json()['message'])
            exit(1)
        return free_token.json()['access_token']
    free_access_tk = get_free_token(login_endpoint)
    # Exchange the module-level login_config credentials for a user token.
    auth_data = hmaxcfg.get_auth_token_info(login_config)
    headers = auth_data['headers']
    headers['authorization'] = "Bearer {}".format(free_access_tk)
    auth_rep = SESSION.post(url=login_endpoint, headers=headers, json=auth_data['data'])
    if int(auth_rep.status_code) != 200:
        print(auth_rep.json()['message'])
        exit(1)

    access_token_js = auth_rep.json()

    # NOTE(review): this literal repeats the "id" key inside ONE dict, so it
    # collapses to a single entry ({"id": "urn:hbo:user:me"}); four separate
    # {"id": ...} dicts were probably intended. Also note the request uses
    # "urn:hbo:profiles:mined" while the loop below matches
    # "urn:hbo:profiles:mine" — confirm which urn is correct.
    login_grant_access = [
        {
            "id": "urn:hbo:privacy-settings:mined",
            "id": "urn:hbo:profiles:mined",
            "id": "urn:hbo:query:lastplayed",
            "id": "urn:hbo:user:me"}
    ]
    # Headers for the multi-status content query (authorized with the
    # refresh_token from the credential grant).
    user_grant_access = {
        "accept": "application/vnd.hbo.v9.full+json",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": hmaxcfg.metadata_language,
        "user-agent": hmaxcfg.UA,
        "x-hbo-client-version": "Hadron/50.40.0.111 desktop (DESKTOP)",
        "x-hbo-device-name": "desktop",
        "x-hbo-device-os-version": "undefined",
        "Authorization": f"Bearer {access_token_js['refresh_token']}"
    }
    user_grant_req = SESSION.post(content_url, json=login_grant_access, headers=user_grant_access)

    # 207 Multi-Status is the expected success code for this batch endpoint.
    if int(user_grant_req.status_code) != 207:
        print("failed to list profiles")

    user_grant_js = user_grant_req.json()
    user_grant_id = ""

    # Pick the first profile from the profiles payload, if any.
    for profile in user_grant_js:
        if profile['id'] == "urn:hbo:profiles:mine":
            if len(profile['body']['profiles']) > 0:
                user_grant_id = profile['body']['profiles'][0]['profileId']
            else:
                print("no profiles found, create one on hbomax and try again")
                exit(1)

    # The profile-token request is authorized with the *anonymous* token.
    profile_headers = {
        "accept": "application/vnd.hbo.v9.full+json",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": hmaxcfg.metadata_language,
        "user-agent": hmaxcfg.UA,
        "x-hbo-client-version": "Hadron/50.40.0.111 desktop (DESKTOP)",
        "x-hbo-device-name": "desktop",
        "x-hbo-device-os-version": "undefined",
        "referer": "https://play.hbomax.com/profileSelect",
        "Authorization": f"Bearer {free_access_tk}" #~ free token
    }

    user_profile = {
        "grant_type": "user_refresh_profile",
        "profile_id": user_grant_id,
        "refresh_token": f"{access_token_js['refresh_token']}",
    }

    user_profile_req = SESSION.post(login_endpoint, json=user_profile, headers=profile_headers)

    if int(user_profile_req.status_code) != 200:
        error_msg = "failed to obatin the final token"
        print(error_msg)

    user_profile_js = user_profile_req.json()

    refresh_token = user_profile_js['refresh_token']

    # Persist the profile-scoped refresh token with an issue timestamp.
    login_data = {'ACCESS_TOKEN': refresh_token, 'EXPIRATION_TIME': int(time.time())}
    if save_login:
        with open(HMAXTOKEN_FILE, 'w', encoding='utf-8') as f:
            f.write(json.dumps(login_data, indent=4))
            f.close()
    return auth_rep.json()['access_token']
|
||||
|
||||
|
||||
def get_video_payload(urn):
    """Build the single-item content-request payload for `urn`."""
    request_headers = hmaxcfg.generate_payload()['headers']
    return [{"id": urn, "headers": request_headers}]
|
|
@ -0,0 +1,125 @@
|
|||
import uuid, sys
|
||||
import configparser
|
||||
|
||||
from shutil import which
|
||||
from os.path import dirname, realpath, join
|
||||
from os import pathsep, environ
|
||||
|
||||
def generate_device():
    """Return a fresh random device identifier (UUID4, canonical text form)."""
    device_id = uuid.uuid4()
    return str(device_id)
|
||||
_uuid = generate_device() #traceid
|
||||
|
||||
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36'
|
||||
config = {}
|
||||
|
||||
config['la'] = {
|
||||
'tokens': 'https://gateway-latam.api.hbo.com/auth/tokens',
|
||||
'content': 'https://comet-latam.api.hbo.com/content',
|
||||
'license_wv': 'https://comet-latam.api.hbo.com/drm/license/widevine?keygen=playready&drmKeyVersion=2'
|
||||
}
|
||||
|
||||
config['us'] = {
|
||||
'tokens': 'https://gateway.api.hbo.com/auth/tokens',
|
||||
'content': 'https://comet.api.hbo.com/content',
|
||||
'license_wv': 'https://comet.api.hbo.com/drm/license/widevine?keygen=playready&drmKeyVersion=2'
|
||||
}
|
||||
|
||||
metadata_language = 'en-US'
|
||||
|
||||
UA = 'Mozilla/5.0 (Linux; Android 7.1.1; SHIELD Android TV Build/LMY47D) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/84.0.4147.135 Safari/537.36'
|
||||
|
||||
login_headers = {
|
||||
"accept": "application/vnd.hbo.v9.full+json",
|
||||
"accept-encoding": "gzip, deflate, br",
|
||||
"accept-language": metadata_language,
|
||||
"user-agent": UA,
|
||||
"x-hbo-client-version": "Hadron/50.40.0.111 desktop (DESKTOP)",
|
||||
"x-hbo-device-name": "desktop",
|
||||
"x-hbo-device-os-version": "undefined",
|
||||
}
|
||||
|
||||
login_json = {
|
||||
"client_id": '24fa5e36-3dc4-4ed0-b3f1-29909271b63d',
|
||||
"client_secret": '24fa5e36-3dc4-4ed0-b3f1-29909271b63d',
|
||||
"scope":"browse video_playback_free",
|
||||
"grant_type":"client_credentials",
|
||||
"deviceSerialNumber": 'b394a2da-b3a7-429d-8f70-5c4eae50a678',
|
||||
"clientDeviceData":{
|
||||
"paymentProviderCode":"apple"
|
||||
}
|
||||
}
|
||||
|
||||
payload = {
|
||||
'x-hbo-device-model':user_agent,
|
||||
'x-hbo-video-features':'server-stitched-playlist,mlp',
|
||||
'x-hbo-session-id':_uuid,
|
||||
'x-hbo-video-player-version':'QUANTUM_BROWSER/50.30.0.249',
|
||||
'x-hbo-device-code-override':'ANDROIDTV',
|
||||
'x-hbo-video-mlp':True,
|
||||
}
|
||||
|
||||
# NOTE(review): realpath('hbomax') resolves against the current working
# directory, not this file's location — dirname(realpath(__file__)) may have
# been intended.
SCRIPT_PATH = dirname(realpath('hbomax'))

BINARIES_FOLDER = join(SCRIPT_PATH, 'binaries')
COOKIES_FOLDER = join(SCRIPT_PATH, 'cookies')

# External tool names, resolved to absolute paths below via shutil.which().
# (A duplicated `FFMPEG_BINARY = 'ffmpeg'` assignment was removed.)
MP4DECRYPT_BINARY = 'mp4decrypt'
MEDIAINFO_BINARY = 'mediainfo'
MP4DUMP_BINARY = 'mp4dump'
MKVMERGE_BINARY = 'mkvmerge'
FFMPEG_BINARY = 'ffmpeg'
ARIA2C_BINARY = 'aria2c'
SUBTITLE_EDIT_BINARY = 'subtitleedit'

# Add binaries folder to PATH as the first item
environ['PATH'] = pathsep.join([BINARIES_FOLDER, environ['PATH']])

# Resolved absolute paths (None when a tool is not found on PATH).
MP4DECRYPT = which(MP4DECRYPT_BINARY)
MEDIAINFO = which(MEDIAINFO_BINARY)
MP4DUMP = which(MP4DUMP_BINARY)
MKVMERGE = which(MKVMERGE_BINARY)
FFMPEG = which(FFMPEG_BINARY)
ARIA2C = which(ARIA2C_BINARY)
SUBTITLE_EDIT = which(SUBTITLE_EDIT_BINARY)
|
||||
|
||||
def get_token_info():
    """Bundle the anonymous-token request headers and JSON body together."""
    return dict(headers=login_headers, data=login_json)
|
||||
|
||||
def get_user_headers():
    """Build the browser-like request headers for HBO Max API calls.

    Returns a dict of the form ``{'headers': {...}}`` ready to pass to
    requests. Uses the module-level ``_uuid`` as the trace id.
    """
    hdrs = {}
    hdrs['origin'] = 'https://play.hbomax.com'
    hdrs['referer'] = 'https://play.hbomax.com/'
    hdrs['x-b3-traceid'] = f'{_uuid}-{_uuid}'
    hdrs['user-agent'] = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.61 Safari/537.36'
    hdrs['accept'] = 'application/vnd.hbo.v9.full+json'
    hdrs['content-type'] = 'application/json; charset=utf-8'
    hdrs['x-hbo-client-version'] = 'Hadron/50.50.0.85 desktop (DESKTOP)'
    hdrs['x-hbo-device-name'] = 'desktop'
    hdrs['x-hbo-device-os-version'] = 'undefined'
    return {'headers': hdrs}
|
||||
|
||||
def get_auth_token_info(cfg):
    """Assemble the password-grant (user login) token request.

    ``cfg`` must supply 'username' and 'password'. Returns the shared login
    headers, the form body, and the module-level device id.
    """
    credentials = dict()
    credentials["grant_type"] = "user_name_password"
    credentials["scope"] = "browse video_playback device elevated_account_management"
    credentials["username"] = cfg['username']
    credentials["password"] = cfg['password']
    return {'headers': login_headers, 'data': credentials, 'device_id': _uuid}
|
||||
|
||||
def generate_payload():
    """Wrap the module-level playback headers in the expected envelope."""
    return dict(headers=payload)
|
||||
|
||||
class HMAXRegion(object):
    """Region-specific endpoint lookup for HBO Max (LATAM vs US).

    Fix: the original defined the methods without ``self`` or
    ``@staticmethod``, so calling them on an *instance* raised TypeError.
    ``@staticmethod`` keeps class-level calls working and is backward
    compatible. Both methods read the module-level ``config`` mapping.
    """

    @staticmethod
    def _endpoints(region):
        # Shared helper: pull the three endpoint values for a region key.
        region_cfg = config[region]
        return region_cfg['tokens'], region_cfg['content'], region_cfg['license_wv']

    @staticmethod
    def configHBOMaxLatam():
        """Return (tokens, content, license_wv) endpoints for LATAM."""
        return HMAXRegion._endpoints('la')

    @staticmethod
    def configHBOMaxUS():
        """Return (tokens, content, license_wv) endpoints for the US."""
        return HMAXRegion._endpoints('us')
|
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,30 @@
|
|||
from shutil import which
|
||||
from os.path import dirname, realpath, join
|
||||
from os import pathsep, environ
|
||||
|
||||
# Resolve the script's base folder so the bundled binaries can be found.
SCRIPT_PATH = dirname(realpath('paramountplus'))

BINARIES_FOLDER = join(SCRIPT_PATH, 'binaries')

# External tool executable names (resolved to absolute paths below).
MP4DECRYPT_BINARY = 'mp4decrypt'
MP4DUMP_BINARY = 'mp4dump'
MKVMERGE_BINARY = 'mkvmerge'
FFMPEG_BINARY = 'ffmpeg'
ARIA2C_BINARY = 'aria2c'

# Add binaries folder to PATH as the first item so local copies win.
environ['PATH'] = pathsep.join([BINARIES_FOLDER, environ['PATH']])

# Absolute paths of the tools, or None when a tool is not installed.
MP4DECRYPT = which(MP4DECRYPT_BINARY)
MP4DUMP = which(MP4DUMP_BINARY)
MKVMERGE = which(MKVMERGE_BINARY)
FFMPEG = which(FFMPEG_BINARY)
ARIA2C = which(ARIA2C_BINARY)
|
||||
|
||||
class WvDownloaderConfig(object):
    """Plain value holder describing one DASH track download job.

    Attributes mirror the constructor arguments: the parsed MPD mapping,
    the base URL segment templates resolve against, the output path, and
    the AdaptationSet/Representation ids selecting the track.
    """

    def __init__(self, xml, base_url, output_file, track_id, format_id):
        # Store every argument verbatim as a same-named attribute.
        fields = (('xml', xml), ('base_url', base_url),
                  ('output_file', output_file), ('track_id', track_id),
                  ('format_id', format_id))
        for attr, value in fields:
            setattr(self, attr, value)
|
|
@ -0,0 +1,9 @@
|
|||
|
||||
class WvDownloaderConfig(object):
    """Value holder for a DASH track download job, including the track's
    content kind (``file_type``, e.g. video/audio)."""

    def __init__(self, xml, base_url, output_file, track_id, format_id, file_type):
        # Store every argument verbatim as a same-named attribute.
        fields = (('xml', xml), ('base_url', base_url),
                  ('output_file', output_file), ('track_id', track_id),
                  ('format_id', format_id), ('file_type', file_type))
        for attr, value in fields:
            setattr(self, attr, value)
|
|
@ -0,0 +1,116 @@
|
|||
import requests, pathlib
|
||||
import math, subprocess
|
||||
import os, sys, shutil
|
||||
|
||||
class WvDownloader(object):
    """Downloads one DASH track described by a parsed MPD document.

    Builds the segment URL list from the track's SegmentTemplate /
    SegmentTimeline, hands the list to aria2c for parallel download, then
    concatenates the numbered segment files into a single output .mp4.
    """

    def __init__(self, config):
        # config is a WvDownloaderConfig: parsed MPD plus track selection.
        self.xml = config.xml
        self.output_file = config.output_file
        self.config = config

    def download_track(self, aria2c_infile, file_name):
        """Run aria2c on the prepared input file, then stitch the segments.

        Reads the module-global ``temp_folder`` set up by run(); removes
        the temp folder and the aria2c input file afterwards.
        """
        aria2c_opts = [
            'aria2c',
            '--enable-color=false',
            '--allow-overwrite=true',
            '--summary-interval=0',
            '--download-result=hide',
            '--async-dns=false',
            '--check-certificate=false',
            '--auto-file-renaming=false',
            '--file-allocation=none',
            '--console-log-level=warn',
            '-x16', '-s16', '-j16',
            '-i', aria2c_infile]
        subprocess.run(aria2c_opts, check=True)

        # Segment files are zero-padded (see run()), so rglob's
        # lexicographic order is also segment order.
        source_files = pathlib.Path(temp_folder).rglob(r'./*.mp4')
        with open(file_name, mode='wb') as (destination):
            for file in source_files:
                with open(file, mode='rb') as (source):
                    shutil.copyfileobj(source, destination)
        if os.path.exists(temp_folder):
            shutil.rmtree(temp_folder)
        os.remove(aria2c_infile)
        print('\nDone!')

    def process_url_templace(self, template, representation_id, bandwidth, time, number):
        """Expand a DASH ``$identifier$`` URL template (sic: 'templace').

        Substitutes $RepresentationID$, $Number$ (with optional printf-style
        width such as $Number%05d$), $Bandwidth$ and $Time$, then unescapes
        '$$' and strips '../' path segments.
        NOTE(review): if representation_id is None, ``result`` is never
        bound and the later uses raise UnboundLocalError — confirm callers
        always pass a representation id.
        """
        if representation_id is not None: result = template.replace('$RepresentationID$', representation_id)
        if number is not None:
            nstart = result.find('$Number')
            if nstart >= 0:
                nend = result.find('$', nstart+1)
                if nend >= 0:
                    var = result[nstart+1 : nend]
                    if 'Number%' in var:
                        # printf-style width follows 'Number', e.g. '%05d'.
                        value = var[6:] % (int(number))
                    else:
                        value = number
                    result = result.replace('$'+var+'$', value)
        if bandwidth is not None: result = result.replace('$Bandwidth$', bandwidth)
        if time is not None: result = result.replace('$Time$', time)
        result = result.replace('$$', '$').replace('../', '')
        return result

    def generate_segments(self):
        """Return the full list of segment URLs for the configured track."""
        segment_template = self.get_segment_template()
        return self.get_segments(segment_template)

    def get_segments(self, segment_template):
        """Build the init-segment URL plus one URL per timeline segment.

        NOTE(review): ``current_time`` is only bound when a segment entry
        carries '@t', and xmltodict-style parses yield '@t'/'@d' as
        strings, so ``current_time += seg['@d']`` would concatenate rather
        than add — confirm against real manifests.
        """
        urls = []
        urls.append(self.config.base_url + segment_template['@initialization'].replace('$RepresentationID$', self.config.format_id))
        current_number = 1
        for seg in self.force_segmentimeline(segment_template):
            if '@t' in seg:
                current_time = seg['@t']
            # '@r' is the repeat count: one entry stands for @r+1 segments.
            for i in range(int(seg.get('@r', 0)) + 1):
                urls.append(self.config.base_url + self.process_url_templace(segment_template['@media'],
                    representation_id=self.config.format_id,
                    bandwidth=None, time=str(current_time), number=str(current_number)))
                current_number += 1
                current_time += seg['@d']
        return urls

    def force_segmentimeline(self, segment_timeline):
        """Return the SegmentTimeline's S entries, always as a list."""
        if isinstance(segment_timeline['SegmentTimeline']['S'], list):
            x16 = segment_timeline['SegmentTimeline']['S']
        else:
            x16 = [segment_timeline['SegmentTimeline']['S']]
        return x16

    def force_instance(self, x):
        """Return an AdaptationSet's Representation(s), always as a list."""
        if isinstance(x['Representation'], list):
            X = x['Representation']
        else:
            X = [x['Representation']]
        return X

    def get_segment_template(self):
        """Locate the SegmentTemplate for the selected track and format ids.

        Raises IndexError when no AdaptationSet/Representation matches.
        """
        x = [item for (i, item) in enumerate(self.xml['MPD']['Period']['AdaptationSet']) if self.config.track_id == item["@id"]][0]
        segment_level = [item['SegmentTemplate'] for (i, item) in enumerate(self.force_instance(x)) if self.config.format_id == item["@id"]][0]
        return segment_level

    def run(self):
        """Entry point: build URLs, prepare the temp folder, download."""
        urls = self.generate_segments()

        print('\n' + self.output_file)
        # temp_folder is a module global because download_track() reads it.
        global temp_folder
        aria2c_infile = 'aria2c_infile.txt'
        if os.path.isfile(aria2c_infile):
            os.remove(aria2c_infile)
        temp_folder = self.output_file.replace('.mp4', '')
        if os.path.exists(temp_folder):
            shutil.rmtree(temp_folder)
        if not os.path.exists(temp_folder):
            os.makedirs(temp_folder)

        if len(urls) > 1:
            # Width of the zero-padded segment index, so file names sort.
            num_segments = int(math.log10(len(urls))) + 1
            with open(aria2c_infile, 'a', encoding='utf8') as (file):
                for (i, url) in enumerate(urls):
                    file.write(f'{url}\n')
                    file.write(f'\tout={temp_folder}.{i:0{num_segments}d}.mp4\n')
                    file.write(f'\tdir={temp_folder}\n')
                    file.flush()
            self.download_track(aria2c_infile, self.output_file)
        print('Done!')
|
|
@ -0,0 +1,110 @@
|
|||
import requests, pathlib
|
||||
import math, subprocess
|
||||
import os, sys, shutil
|
||||
|
||||
class WvDownloader(object):
    """DASH track downloader (trimmed variant of the sibling implementation).

    Differs by a shorter aria2c option set and by substring ('in') matching
    of track/format ids in get_segment_template().
    """

    def __init__(self, config):
        # config is a WvDownloaderConfig: parsed MPD plus track selection.
        self.xml = config.xml
        self.output_file = config.output_file
        self.config = config

    def download_track(self, aria2c_infile, file_name):
        """Run aria2c over the input file, then stitch segments in name order."""
        aria2c_opts = [
            'aria2c',
            '--allow-overwrite=true',
            '--download-result=hide',
            '--console-log-level=warn',
            '-x16', '-s16', '-j16',
            '-i', aria2c_infile]
        subprocess.run(aria2c_opts, check=True)

        # Zero-padded names make rglob's lexicographic order segment order.
        source_files = pathlib.Path(temp_folder).rglob(r'./*.mp4')
        with open(file_name, mode='wb') as (destination):
            for file in source_files:
                with open(file, mode='rb') as (source):
                    shutil.copyfileobj(source, destination)
        if os.path.exists(temp_folder):
            shutil.rmtree(temp_folder)
        os.remove(aria2c_infile)
        print('\nDone!')

    def process_url_templace(self, template, representation_id, bandwidth, time, number):
        """Expand a DASH ``$identifier$`` URL template.

        NOTE(review): if representation_id is None, ``result`` is unbound —
        confirm callers always pass a representation id.
        """
        if representation_id is not None: result = template.replace('$RepresentationID$', representation_id)
        if number is not None:
            nstart = result.find('$Number')
            if nstart >= 0:
                nend = result.find('$', nstart+1)
                if nend >= 0:
                    var = result[nstart+1 : nend]
                    if 'Number%' in var:
                        # printf-style width follows 'Number', e.g. '%05d'.
                        value = var[6:] % (int(number))
                    else:
                        value = number
                    result = result.replace('$'+var+'$', value)
        if bandwidth is not None: result = result.replace('$Bandwidth$', bandwidth)
        if time is not None: result = result.replace('$Time$', time)
        result = result.replace('$$', '$').replace('../', '')
        return result

    def generate_segments(self):
        """Return the full list of segment URLs for the configured track."""
        segment_template = self.get_segment_template()
        return self.get_segments(segment_template)

    def get_segments(self, segment_template):
        """Build init-segment URL plus one URL per SegmentTimeline entry.

        NOTE(review): same caveats as the sibling class — ``current_time``
        binding and string '@t'/'@d' arithmetic; confirm with real data.
        """
        urls = []
        urls.append(self.config.base_url + segment_template['@initialization'].replace('$RepresentationID$', self.config.format_id))
        current_number = 1
        for seg in self.force_segmentimeline(segment_template):
            if '@t' in seg:
                current_time = seg['@t']
            # '@r' is the repeat count: one entry stands for @r+1 segments.
            for i in range(int(seg.get('@r', 0)) + 1):
                urls.append(self.config.base_url + self.process_url_templace(segment_template['@media'],
                    representation_id=self.config.format_id,
                    bandwidth=None, time=str(current_time), number=str(current_number)))
                current_number += 1
                current_time += seg['@d']
        return urls

    def force_segmentimeline(self, segment_timeline):
        """Return the SegmentTimeline's S entries, always as a list."""
        if isinstance(segment_timeline['SegmentTimeline']['S'], list):
            x16 = segment_timeline['SegmentTimeline']['S']
        else:
            x16 = [segment_timeline['SegmentTimeline']['S']]
        return x16

    def force_instance(self, x):
        """Return an AdaptationSet's Representation(s), always as a list."""
        if isinstance(x['Representation'], list):
            X = x['Representation']
        else:
            X = [x['Representation']]
        return X

    def get_segment_template(self):
        """Locate the SegmentTemplate whose ids *contain* the configured ids.

        Uses membership ('in') rather than equality, so it also matches ids
        that merely contain the configured substring.
        """
        x = [item for (i, item) in enumerate(self.xml['MPD']['Period']['AdaptationSet']) if self.config.track_id in item["@id"]][0]
        segment_level = [item['SegmentTemplate'] for (i, item) in enumerate(self.force_instance(x)) if self.config.format_id in item["@id"]][0]
        return segment_level

    def run(self):
        """Entry point: build URLs, prepare the temp folder, download."""
        urls = self.generate_segments()

        print('\n' + self.output_file)
        # temp_folder is a module global because download_track() reads it.
        global temp_folder
        aria2c_infile = 'aria2c_infile.txt'
        if os.path.isfile(aria2c_infile):
            os.remove(aria2c_infile)
        temp_folder = self.output_file.replace('.mp4', '')
        if os.path.exists(temp_folder):
            shutil.rmtree(temp_folder)
        if not os.path.exists(temp_folder):
            os.makedirs(temp_folder)

        if len(urls) > 1:
            # Width of the zero-padded segment index, so file names sort.
            num_segments = int(math.log10(len(urls))) + 1
            with open(aria2c_infile, 'a', encoding='utf8') as (file):
                for (i, url) in enumerate(urls):
                    file.write(f'{url}\n')
                    file.write(f'\tout={temp_folder}.{i:0{num_segments}d}.mp4\n')
                    file.write(f'\tdir={temp_folder}\n')
                    file.flush()
            self.download_track(aria2c_infile, self.output_file)
        print('Done!')
|
Binary file not shown.
|
@ -0,0 +1,15 @@
|
|||
|
||||
# Default proxy registry; the 'none' entry maps to no proxy.
config = {
    'proxies': {
        'none': None
    },
}
|
||||
|
||||
class ProxyConfig(object):
    """Lookup table mapping proxy names to proxy definitions.

    Fix: the original aliased the module-level ``config`` dict and then
    overwrote its 'proxies' entry, so every instance clobbered shared
    global state. Each instance now keeps its own configuration dict;
    the get_proxy() interface is unchanged.
    """

    def __init__(self, proxies):
        # Per-instance mapping; previously this mutated the module global.
        self.config = {'proxies': proxies}

    def get_proxy(self, proxy):
        """Return the proxy entry named *proxy*, or None when unknown."""
        return self.config['proxies'].get(proxy)
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
import logging, subprocess, re, base64
|
||||
from pywidevine.cdm import cdm, deviceconfig
|
||||
|
||||
class WvDecrypt(object):
    """Widevine license-exchange helper built on pywidevine's Cdm.

    Opens a CDM session for the given PSSH init data, optionally installs
    a service certificate, and exposes challenge/license/key accessors.
    """

    # Widevine DRM system id bytes (ed ef 8b a9 ... 1d 21 ed).
    WV_SYSTEM_ID = [
        237, 239, 139, 169, 121, 214, 74, 206, 163, 200, 39, 220, 213, 29, 33, 237]

    def __init__(self, init_data_b64, cert_data_b64, device):
        self.init_data_b64 = init_data_b64
        self.cert_data_b64 = cert_data_b64
        self.device = device
        self.cdm = cdm.Cdm()

        # NOTE(review): nesting reconstructed from decompiled source —
        # check_pssh closes over self and runs before open_session below.
        def check_pssh(pssh_b64):
            # If the Widevine system id is not at offset 12, the init data
            # is raw; wrap it in a freshly built PSSH box header.
            pssh = base64.b64decode(pssh_b64)
            if not pssh[12:28] == bytes(self.WV_SYSTEM_ID):
                new_pssh = bytearray([0, 0, 0])
                new_pssh.append(32 + len(pssh))  # total box size
                new_pssh[4:] = bytearray(b'pssh')
                new_pssh[8:] = [0, 0, 0, 0]  # version + flags
                new_pssh[13:] = self.WV_SYSTEM_ID
                new_pssh[29:] = [0, 0, 0, 0]
                new_pssh[31] = len(pssh)  # payload size
                new_pssh[32:] = pssh
                return base64.b64encode(new_pssh)
            else:
                return pssh_b64

        self.session = self.cdm.open_session(check_pssh(self.init_data_b64), deviceconfig.DeviceConfig(self.device))
        if self.cert_data_b64:
            self.cdm.set_service_certificate(self.session, self.cert_data_b64)

    def log_message(self, msg):
        # Identity formatting hook; kept for log-style call sites.
        return '{}'.format(msg)

    def start_process(self):
        """Collect CONTENT keys from the session as 'kid:key' hex strings.

        Returns (ok, keys); ok is False when key extraction raised.
        """
        keyswvdecrypt = []
        try:
            for key in self.cdm.get_keys(self.session):
                if key.type == 'CONTENT':
                    keyswvdecrypt.append(self.log_message('{}:{}'.format(key.kid.hex(), key.key.hex())))

        except Exception:
            return (
                False, keyswvdecrypt)
        else:
            return (
                True, keyswvdecrypt)

    def get_challenge(self):
        """Return the license request (challenge) bytes for this session."""
        return self.cdm.get_license_request(self.session)

    def update_license(self, license_b64):
        """Feed the server's license response into the CDM session."""
        self.cdm.provide_license(self.session, license_b64)
        return True
|
|
@ -0,0 +1,59 @@
|
|||
# uncompyle6 version 3.7.3
|
||||
# Python bytecode 3.6 (3379)
|
||||
# Decompiled from: Python 3.7.8 (tags/v3.7.8:4b47a5b6ba, Jun 28 2020, 08:53:46) [MSC v.1916 64 bit (AMD64)]
|
||||
# Embedded file name: pywidevine\decrypt\wvdecryptcustom.py
|
||||
import logging, subprocess, re, base64
|
||||
from pywidevine.cdm import cdm, deviceconfig
|
||||
|
||||
class WvDecrypt(object):
    """Widevine license-exchange helper (duplicate of the sibling class,
    recovered by uncompyle6 per the file header)."""

    # Widevine DRM system id bytes.
    WV_SYSTEM_ID = [
        237, 239, 139, 169, 121, 214, 74, 206, 163, 200, 39, 220, 213, 29, 33, 237]

    def __init__(self, init_data_b64, cert_data_b64, device):
        self.init_data_b64 = init_data_b64
        self.cert_data_b64 = cert_data_b64
        self.device = device
        self.cdm = cdm.Cdm()

        # NOTE(review): nesting reconstructed from decompiled source.
        def check_pssh(pssh_b64):
            # Wrap raw init data in a PSSH box when the Widevine system id
            # is not already present at offset 12.
            pssh = base64.b64decode(pssh_b64)
            if not pssh[12:28] == bytes(self.WV_SYSTEM_ID):
                new_pssh = bytearray([0, 0, 0])
                new_pssh.append(32 + len(pssh))  # total box size
                new_pssh[4:] = bytearray(b'pssh')
                new_pssh[8:] = [0, 0, 0, 0]  # version + flags
                new_pssh[13:] = self.WV_SYSTEM_ID
                new_pssh[29:] = [0, 0, 0, 0]
                new_pssh[31] = len(pssh)  # payload size
                new_pssh[32:] = pssh
                return base64.b64encode(new_pssh)
            else:
                return pssh_b64

        self.session = self.cdm.open_session(check_pssh(self.init_data_b64), deviceconfig.DeviceConfig(self.device))
        if self.cert_data_b64:
            self.cdm.set_service_certificate(self.session, self.cert_data_b64)

    def log_message(self, msg):
        # Identity formatting hook.
        return '{}'.format(msg)

    def start_process(self):
        """Collect CONTENT keys as 'kid:key' hex strings; (ok, keys)."""
        keyswvdecrypt = []
        try:
            for key in self.cdm.get_keys(self.session):
                if key.type == 'CONTENT':
                    keyswvdecrypt.append(self.log_message('{}:{}'.format(key.kid.hex(), key.key.hex())))

        except Exception:
            return (
                False, keyswvdecrypt)
        else:
            return (
                True, keyswvdecrypt)

    def get_challenge(self):
        """Return the license request (challenge) bytes for this session."""
        return self.cdm.get_license_request(self.session)

    def update_license(self, license_b64):
        """Feed the server's license response into the CDM session."""
        self.cdm.provide_license(self.session, license_b64)
        return True
|
|
@ -0,0 +1,72 @@
|
|||
import pywidevine.downloader.wvdownloaderconfig as wvdl_cfg
|
||||
|
||||
class VideoTrack(object):
    """A single (possibly encrypted) DASH video representation."""

    def __init__(self, encrypted, size, id, url, codec, bitrate, width, height):
        self.encrypted = encrypted
        self.size = size
        self.id = id
        self.url = url
        self.codec = codec
        self.bitrate = bitrate
        self.width = width
        self.height = height

    def get_type(self):
        """Track-kind discriminator used by downstream code."""
        return "video"

    def __repr__(self):
        template = "(encrypted={}, size={}, id={}, url={}, codec={}, bitrate={}, width={}, height={})"
        return template.format(self.encrypted, self.size, self.id, self.url,
                               self.codec, self.bitrate, self.width, self.height)

    def get_filename(self, filename, decrypted=False, fixed=False):
        """Render the on-disk filename for this track from wvdl_cfg templates."""
        template = wvdl_cfg.ENCRYPTED_FILENAME
        if decrypted or not self.encrypted:
            template = wvdl_cfg.DECRYPTED_FILENAME
        if fixed:
            template = template + '_fixed.mkv'
        return template.format(filename=filename, track_type="video", track_no=self.id)
|
||||
|
||||
|
||||
|
||||
class AudioTrack(object):
    """A single (possibly encrypted) DASH audio representation."""

    def __init__(self, encrypted, size, id, url, codec, bitrate, language):
        self.encrypted = encrypted
        self.size = size
        self.id = id
        self.url = url
        self.codec = codec
        self.bitrate = bitrate
        self.language = language

    def get_type(self):
        """Track-kind discriminator used by downstream code."""
        return "audio"

    def __repr__(self):
        # Note: language is intentionally omitted, matching historic output.
        template = "(encrypted={}, size={}, id={}, url={}, codec={}, bitrate={})"
        return template.format(self.encrypted, self.size, self.id, self.url,
                               self.codec, self.bitrate)

    def get_filename(self, filename, decrypted=False, fixed=False):
        """Render the on-disk filename for this track from wvdl_cfg templates."""
        template = wvdl_cfg.ENCRYPTED_FILENAME
        if decrypted or not self.encrypted:
            template = wvdl_cfg.DECRYPTED_FILENAME
        if fixed:
            template = template + '_fixed.mka'
        return template.format(filename=filename, track_type="audio", track_no=self.id)
|
||||
|
||||
|
||||
class SubtitleTrack(object):
    """A subtitle stream with its language and download location."""

    def __init__(self, id, name, language_code, default, url, type):
        self.id = id
        self.name = name
        self.language_code = language_code
        self.url = url
        self.type = type
        self.default = default

    def __repr__(self):
        template = "(id={}, name={}, language_code={}, url={}, type={})"
        return template.format(self.id, self.name, self.language_code,
                               self.url, self.type)

    def get_filename(self, filename, subtitle_format):
        """Render the on-disk subtitle filename from the wvdl_cfg template."""
        return wvdl_cfg.SUBTITLES_FILENAME.format(filename=filename, language_code=self.language_code, id=self.id, subtitle_type=subtitle_format)
|
|
@ -0,0 +1,199 @@
|
|||
|
||||
import threading
|
||||
import os
|
||||
import requests
|
||||
import math
|
||||
import shutil
|
||||
import pathlib, sys, subprocess
|
||||
|
||||
from requests.sessions import session
|
||||
from tqdm import tqdm
|
||||
from queue import Queue
|
||||
|
||||
# Number of concurrent segment-download worker threads (thread-pool mode).
dlthreads = 24
|
||||
|
||||
class WvDownloader(object):
    """DASH track downloader with two modes: aria2c batch download
    (tqdm_mode false) or an in-process thread pool with a tqdm bar."""

    def __init__(self, config):
        # config is a WvDownloaderConfig (xml, cookies, tqdm_mode, ...).
        self.xml = config.xml
        self.output_file = config.output_file
        self.tqdm_mode = config.tqdm_mode
        self.cookies = config.cookies
        self.config = config

    def download_track(self, aria_input, file_name):
        """Download all segments listed in *aria_input* with aria2c, then
        concatenate the zero-padded segment files into *file_name*.

        Reads the module-global ``temp_folder`` set up by run().
        """
        aria_command = ['aria2c', '-i', aria_input,
            '--enable-color=false',
            '--allow-overwrite=true',
            '--summary-interval=0',
            '--download-result=hide',
            '--async-dns=false',
            '--check-certificate=false',
            '--auto-file-renaming=false',
            '--file-allocation=none',
            '--console-log-level=warn',
            '-x16', '-j16', '-s16']
        # subprocess.run requires Python >= 3.5; nothing runs otherwise.
        if sys.version_info >= (3, 5):
            aria_out = subprocess.run(aria_command)
            aria_out.check_returncode()

        source_files = pathlib.Path(temp_folder).rglob(r'./*.mp4')
        with open(file_name, mode='wb') as (destination):
            for file in source_files:
                with open(file, mode='rb') as (source):
                    shutil.copyfileobj(source, destination)
        if os.path.exists(temp_folder):
            shutil.rmtree(temp_folder)
        os.remove(aria_input)
        print('\nDone!')

    def process_url_templace(self, template, representation_id, bandwidth, time, number):
        """Expand a DASH ``$identifier$`` URL template.

        NOTE(review): if representation_id is None, ``result`` is unbound —
        confirm callers always pass a representation id.
        """
        if representation_id is not None: result = template.replace('$RepresentationID$', representation_id)
        if number is not None:
            nstart = result.find('$Number')
            if nstart >= 0:
                nend = result.find('$', nstart+1)
                if nend >= 0:
                    var = result[nstart+1 : nend]
                    if 'Number%' in var:
                        # printf-style width follows 'Number', e.g. '%05d'.
                        value = var[6:] % (int(number))
                    else:
                        value = number
                    result = result.replace('$'+var+'$', value)
        if bandwidth is not None: result = result.replace('$Bandwidth$', bandwidth)
        if time is not None: result = result.replace('$Time$', time)
        result = result.replace('$$', '$').replace('../', '')
        return result

    def generate_segments(self):
        """Return a generator of relative segment URLs for this track."""
        segs = self.get_representation_number()
        return self.get_segments(segs)

    def get_segments(self, segment_level):
        """Yield one relative URL per SegmentTimeline segment.

        NOTE(review): '@t' is used unconverted while '@d' is int()-coerced;
        a string '@t' would make the += below misbehave — confirm inputs.
        """
        media = segment_level['@media']
        current_number = 1
        current_time = 0
        for seg in self.force_segment_level(segment_level):
            if '@t' in seg:
                current_time = seg['@t']
            # '@r' is the repeat count: one entry stands for @r+1 segments.
            for _ in range(int(seg.get('@r', 0)) + 1):
                url = self.process_url_templace(media, representation_id=self.config.format_id, bandwidth=self.config.bandwidth, time=str(current_time), number=str(current_number))
                current_number += 1
                current_time += int(seg['@d'])
                yield url

    def force_segment_level(self, segment_level):
        """Return the SegmentTimeline's S entries, always as a list."""
        if isinstance(segment_level['SegmentTimeline']['S'], list):
            segment_level = segment_level['SegmentTimeline']['S']
        else:
            segment_level = [segment_level['SegmentTimeline']['S']]
        return segment_level

    def get_representation_number(self):
        """Find the SegmentTemplate for the configured file_type/format_id.

        NOTE(review): ``y`` is computed but never used, and both branches of
        the final try/except are identical — looks like leftover code.
        """
        x = []
        for [idx, item] in enumerate(self.xml['MPD']['Period']['AdaptationSet']):
            try:
                if self.config.file_type in item.get('@mimeType'):
                    x = idx
            except TypeError:
                # '@mimeType' missing (None): fall back to '@contentType'.
                if self.config.file_type in item.get('@contentType'):
                    x = idx

        y = []
        if 'video' in self.config.file_type:
            for [number, rep] in enumerate(self.xml['MPD']['Period']['AdaptationSet'][x]['Representation']):
                if self.config.format_id == rep.get('@id'):
                    y = number

        mpd = self.xml['MPD']['Period']
        try:
            segment_level = mpd['AdaptationSet'][x]['SegmentTemplate']
        except TypeError:
            segment_level = mpd['AdaptationSet'][x]['SegmentTemplate']

        return segment_level

    def run(self):
        """Entry point: build absolute URLs, then download via aria2c or
        via the thread pool, depending on tqdm_mode."""
        segment_list = self.generate_segments()
        urls = []
        for seg_url in segment_list:
            url = self.config.base_url + '/' + seg_url
            urls.append(url)

        print('\n' + self.output_file)
        # Download via aria2c.
        if not self.tqdm_mode:
            # temp_folder is global because download_track() reads it.
            global temp_folder
            aria2c_infile = 'aria2c_infile.txt'
            if os.path.isfile(aria2c_infile):
                os.remove(aria2c_infile)
            temp_folder = self.output_file.replace('.mp4', '')
            if os.path.exists(temp_folder):
                shutil.rmtree(temp_folder)
            if not os.path.exists(temp_folder):
                os.makedirs(temp_folder)

            if len(urls) > 1:
                # Width of the zero-padded segment index, so file names sort.
                num_segments = int(math.log10(len(urls))) + 1
                with open(aria2c_infile, 'a', encoding='utf8') as (file):
                    for (i, url) in enumerate(urls):
                        file.write(f'{url}\n')
                        file.write(f'\tout={temp_folder}.{i:0{num_segments}d}.mp4\n')
                        file.write(f'\tdir={temp_folder}\n')
                        file.flush()
                self.download_track(aria2c_infile, self.output_file)
        else:
            # Download via worker threads.
            work_q = Queue()
            result_q = Queue()

            pool = [WorkerThread(work_q=work_q, result_q=result_q, cookies=self.cookies) for i in range(dlthreads)]
            for thread in pool:
                thread.start()

            work_count = 0
            for seg_url in urls:
                url = seg_url
                work_q.put((work_count, url, self.cookies))
                work_count += 1
            results = []

            # NOTE(review): worker threads are never join()ed/stopped here.
            for _ in tqdm(range(work_count)):
                results.append(result_q.get())
            outfile = open(self.output_file , 'wb+')
            # Results arrive out of order; restore segment order by seq no.
            sortedr = sorted(results, key=lambda v: v[0])
            for r in sortedr:
                outfile.write(r[1])
            outfile.close()
            del results
            print('Done!')
|
||||
|
||||
class Downloader:
    """Thin wrapper around a shared requests.Session for segment fetches."""

    def __init__(self):
        self.session = requests.Session()

    def DownloadSegment(self, url, cookies):
        """Fetch *url* and return the raw (transfer-decoded) body bytes."""
        response = self.session.get(url, cookies=cookies, stream=True)
        response.raw.decode_content = True
        return response.raw.read()
|
||||
|
||||
class WorkerThread(threading.Thread):
    """Worker that pulls (seq, url, cookies) jobs from work_q and pushes
    (seq, data) results onto result_q until asked to stop.

    Fixes: uses Event.is_set() (isSet() is a deprecated alias) and narrows
    the bare ``except:`` to ``except Exception:`` so KeyboardInterrupt /
    SystemExit are no longer swallowed.
    """

    def __init__(self, work_q, result_q, cookies):
        super(WorkerThread, self).__init__()
        self.work_q = work_q
        self.result_q = result_q
        self.cookies = cookies
        # Set by join() to tell the run loop to exit.
        self.stoprequest = threading.Event()
        self.downloader = Downloader()

    def run(self):
        # Poll with a short timeout so a stop request is noticed promptly.
        while not self.stoprequest.is_set():
            try:
                (seq, url, cookies) = self.work_q.get(True, 0.05)
                self.result_q.put((seq, self.downloader.DownloadSegment(url, cookies)))
            except Exception:
                # queue.Empty on timeout, or a failed fetch: keep polling.
                continue

    def join(self, timeout=None):
        """Signal the worker to stop, then join the underlying thread."""
        self.stoprequest.set()
        super(WorkerThread, self).join(timeout)
|
|
@ -0,0 +1,14 @@
|
|||
import re
|
||||
import os
|
||||
import platform
|
||||
|
||||
class WvDownloaderConfig(object):
    """Value holder for the threaded downloader: parsed MPD, base URL,
    output path, selected format/bandwidth, cookies, content kind, and
    whether to use the tqdm thread-pool mode."""

    def __init__(self, xml, base_url, output_file, format_id, bandwidth, cookies, file_type, tqdm_mode):
        # Store every argument verbatim as a same-named attribute.
        fields = (('xml', xml), ('output_file', output_file),
                  ('base_url', base_url), ('format_id', format_id),
                  ('bandwidth', bandwidth), ('cookies', cookies),
                  ('file_type', file_type), ('tqdm_mode', tqdm_mode))
        for attr, value in fields:
            setattr(self, attr, value)
|
|
@ -0,0 +1,271 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
class Muxer(object):
    """Drives mkvmerge to combine a downloaded video track with any audio,
    subtitle and chapter files found next to it into a single .mkv file.

    NOTE(review): this file was recovered without indentation; the nesting
    reconstructed below follows the most plausible reading of the statement
    order -- confirm against the original source before relying on it.
    """

    def __init__(self, CurrentName, SeasonFolder, CurrentHeigh, Type, mkvmergeexe):
        # Base filename (no extension) shared by all sibling track files.
        self.CurrentName = CurrentName
        # Destination folder used when muxing an episode of a show.
        self.SeasonFolder = SeasonFolder
        # Vertical resolution as a string, e.g. "1080" (sic: "Heigh").
        self.CurrentHeigh = CurrentHeigh
        # "show" routes output into SeasonFolder; anything else writes locally.
        self.Type = Type
        # Path to the mkvmerge executable to invoke.
        self.mkvmergeexe = mkvmergeexe

    def mkvmerge_muxer(self, lang):
        """Locate the video track under its possible naming variants, gather
        matching audio/subtitle/chapter files, and run mkvmerge over them.
        """
        VideoInputNoExist = False
        # Probe for the video track under every naming variant the downloader
        # may have produced.
        if os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].h264'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].h264'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].mkv'

        # NOTE(review): this second `if` starts a fresh if/elif chain, so when
        # only the [CBR].h264 file above exists the chain's final `else` still
        # sets VideoInputNoExist = True and the mux is skipped -- looks like a
        # latent bug; verify the intended nesting.
        if os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p].h264'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p].h264'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].mp4'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].mp4'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].h265'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].h265'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HEVC].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].mp4'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].mp4'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].vp9'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].vp9'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [VP9].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].mp4'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].mp4'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].h265'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].h265'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [HDR].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].mp4'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].mp4'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].h264'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].h264'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [AVC HIGH].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].mp4'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].mp4'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p] [CBR].mkv'

        elif os.path.isfile(self.CurrentName + ' [' + self.CurrentHeigh + 'p].mp4'):
            VideoInputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p].mp4'
            if self.Type == "show":
                VideoOutputName = os.path.join(self.SeasonFolder, self.CurrentName + ' [' + self.CurrentHeigh + 'p].mkv')
            else:
                VideoOutputName = self.CurrentName + ' [' + self.CurrentHeigh + 'p].mkv'
        else:
            VideoInputNoExist = True

        if VideoInputNoExist == False:
            # Audio file extensions that may sit next to the video track.
            AudioExtensionsList=[
                ".ac3",
                ".mka",
                ".eac3",
                ".m4a",
                ".dts",
                ".mp3",
                ".aac"
            ]

            SubsExtensionsList= [
                ".srt",
                ".ass",
            ]

            if lang == "English":
                language_tag = "English"

            # NOTE(review): language_tag, subs_forced and subs_sdh are only
            # bound when lang == "English"; any other value of `lang` raises
            # NameError here and in the subtitle loops below -- confirm
            # whether other languages are ever passed in.
            if language_tag == "English":
                subs_forced = 'Forced'
                subs_sdh = 'SDH'
            #["en", "en", "eng", "English", "yes", "yes"]
            #[audio_language, subs_language, language_id, language_name, audio_default, subs_default]
            LanguageList = [
                ["es-la", "es-la", "spa", "Spanish", "yes", "no"],
                ["en", "en", "eng", "English", "no", "no"],
                ["pt-br", "pt-br", "por", "Brazilian Portuguese", "no", "no"],
                ["es", "es", "spa", "Castilian", "no", "no"],
                ["cat", "cat", "cat", "Catalan", "no", "no"],
                ["eu", "eu", "baq", "Basque", "no", "no"],
                ["fr", "fr", "fre", "French", "no", "no"],
                ["fr-bg", "fr-bg", "fre", "French (Belgium)", "no", "no"],
                ["fr-lu", "fr-lu", "fre", "French (Luxembourg)", "no", "no"],
                ["fr-ca", "fr-ca", "fre", "French (Canada)", "no", "no"],
                ["de", "de", "ger", "German", "no", "no"],
                ["it", "it", "ita", "Italian", "no", "no"],
                ["pl", "pl", "pol", "Polish", "no", "no"],
                ["tr", "tr", "tur", "Turkish", "no", "no"],
                ["hy", "hy", "arm", "Armenian", "no", "no"],
                ["sv", "sv", "swe", "Swedish", "no", "no"],
                ["da", "da", "dan", "Danish", "no", "no"],
                ["fi", "fi", "fin", "Finnish", "no", "no"],
                ["nl", "nl", "dut", "Dutch", "no", "no"],
                ["nl-be", "nl-be", "dut", "Flemish", "no", "no"],
                ["no", "no", "nor", "Norwegian", "no", "no"],
                ["lv", "lv", "lav", "Latvian", "no", "no"],
                ["is", "is", "ice", "Icelandic", "no", "no"],
                ["ru", "ru", "rus", "Russian", "no", "no"],
                ["uk", "uk", "ukr", "Ukrainian", "no", "no"],
                ["hu", "hu", "hun", "Hungarian", "no", "no"],
                ["bg", "bg", "bul", "Bulgarian", "no", "no"],
                ["hr", "hr", "hrv", "Croatian", "no", "no"],
                ["lt", "lt", "lit", "Lithuanian", "no", "no"],
                ["et", "et", "est", "Estonian", "no", "no"],
                ["el", "el", "gre", "Greek", "no", "no"],
                ["he", "he", "heb", "Hebrew", "no", "no"],
                ["ar", "ar", "ara", "Arabic", "no", "no"],
                ["fa", "fa", "per", "Persian", "no", "no"],
                ["ro", "ro", "rum", "Romanian", "no", "no"],
                ["sr", "sr", "srp", "Serbian", "no", "no"],
                ["cs", "cs", "cze", "Czech", "no", "no"],
                ["sk", "sk", "slo", "Slovak", "no", "no"],
                ["sl", "sl", "slv", "Slovenian", "no", "no"],
                ["sq", "sq", "alb", "Albanian", "no", "no"],
                ["bs", "bs", "bos", "Bosnian", "no", "no"],
                ["mk", "mk", "mac", "Macedonian", "no", "no"],
                ["hi", "hi", "hin", "Hindi", "no", "no"],
                ["bn", "bn", "ben", "Bengali", "no", "no"],
                ["ur", "ur", "urd", "Urdu", "no", "no"],
                ["pa", "pa", "pan", "Punjabi", "no", "no"],
                ["ta", "ta", "tam", "Tamil", "no", "no"],
                ["te", "te", "tel", "Telugu", "no", "no"],
                ["mr", "mr", "mar", "Marathi", "no", "no"],
                ["kn", "kn", "kan", "Kannada (India)", "no", "no"],
                ["gu", "gu", "guj", "Gujarati", "no", "no"],
                ["ml", "ml", "mal", "Malayalam", "no", "no"],
                ["si", "si", "sin", "Sinhala", "no", "no"],
                ["as", "as", "asm", "Assamese", "no", "no"],
                ["mni", "mni", "mni", "Manipuri", "no", "no"],
                ["tl", "tl", "tgl", "Tagalog", "no", "no"],
                ["id", "id", "ind", "Indonesian", "no", "no"],
                ["ms", "ms", "may", "Malay", "no", "no"],
                ["fil", "fil", "fil", "Filipino", "no", "no"],
                ["vi", "vi", "vie", "Vietnamese", "no", "no"],
                ["th", "th", "tha", "Thai", "no", "no"],
                ["km", "km", "khm", "Khmer", "no", "no"],
                ["ko", "ko", "kor", "Korean", "no", "no"],
                ["zh", "zh", "chi", "Mandarin", "no", "no"],
                ["yue", "yue", "chi", "Cantonese", "no", "no"],
                ["zh-hans", "zh-hans", "chi", "Chinese (Simplified)", "no", "no"],
                ["zh-hant", "zh-hant", "chi", "Chinese (Traditional)", "no", "no"],
                ["zh-hk", "zh-hk", "chi", "Chinese (Simplified)", "no", "no"],
                ["zh-tw", "zh-tw", "chi", "Chinese (Traditional)", "no", "no"],
                ["zh-sg", "zh-sg", "chi", "Chinese (Singapore)", "no", "no"],
                ["ja", "ja", "jpn", "Japanese", "no", "no"],
                ["tlh", "tlh", "tlh", "Klingon", "no", "no"],
                ["zxx", "zxx", "zxx", "No Dialogue", "no", "no"]
            ]

            # Build one mkvmerge argument group per audio file found on disk;
            # '(' filename ')' is mkvmerge's way of grouping one input file.
            ALLAUDIOS = []
            default_active_audio = False
            for audio_language, subs_language, language_id, language_name, audio_default, subs_default in LanguageList:
                for AudioExtension in AudioExtensionsList:
                    if os.path.isfile(self.CurrentName + ' (' + audio_language + ')' + AudioExtension):
                        # Only the first default-flagged track keeps "yes".
                        if default_active_audio == True: audio_default = "no"
                        ALLAUDIOS = ALLAUDIOS + ['--language', '0:' + language_id, '--track-name', '0:' + language_name, '--default-track', '0:' + audio_default, '(', self.CurrentName + ' (' + audio_language + ')' + AudioExtension, ')']
                        if audio_default == "yes": default_active_audio = True

            # Second pass picks up "-ad" (audio description) variants, which
            # are never marked as the default track.
            for audio_language, subs_language, language_id, language_name, audio_default, subs_default in LanguageList:
                for AudioExtension in AudioExtensionsList:
                    if os.path.isfile(self.CurrentName + ' (' + audio_language + '-ad)' + AudioExtension):
                        if default_active_audio == True: audio_default = "no"
                        ALLAUDIOS = ALLAUDIOS + ['--language', '0:' + language_id, '--track-name', '0:' + language_name + ' (Audio Description)', '--default-track', '0:no', '(', self.CurrentName + ' (' + audio_language + '-ad)' + AudioExtension, ')']
                        if audio_default == "yes": default_active_audio = True

            # Each audio group above is 9 argv entries, so len 9 means one
            # audio track, len 18 means two (possibly a language plus its AD
            # variant -- checked via the filenames at indexes 7 and 16).
            OnlyOneLanguage = False
            if len(ALLAUDIOS) == 9:
                OnlyOneLanguage = True

            elif len(ALLAUDIOS) == 18:
                if ALLAUDIOS[1] == ALLAUDIOS[10]:
                    if '-ad' in ALLAUDIOS[7] or '-ad' in ALLAUDIOS[16]:
                        OnlyOneLanguage = True
                    else:
                        OnlyOneLanguage = False


            # Gather subtitle tracks: forced, regular and SDH variants.
            ALLSUBS = []
            default_active_subs = False
            for audio_language, subs_language, language_id, language_name, audio_default, subs_default in LanguageList:
                for SubsExtension in SubsExtensionsList:
                    if os.path.isfile(self.CurrentName + ' (' + subs_language + '-forced)' + SubsExtension):
                        if subs_default == "yes": default_active_subs = True
                        ALLSUBS = ALLSUBS + ['--language', '0:' + language_id, '--track-name', '0:' + subs_forced, '--forced-track', '0:yes', '--default-track', '0:' + subs_default, '--compression', '0:none', '(', self.CurrentName + ' (' + subs_language + '-forced)' + SubsExtension, ')']

                    # With a single audio language the regular subtitle may
                    # inherit the default flag; otherwise it never does.
                    if OnlyOneLanguage == True:
                        if default_active_subs == True: subs_default = "no"
                        if os.path.isfile(self.CurrentName + ' (' + subs_language + ')' + SubsExtension):
                            ALLSUBS = ALLSUBS + ['--language', '0:' + language_id, '--forced-track', '0:no', '--default-track', '0:' + subs_default, '--compression', '0:none', '(', self.CurrentName + ' (' + subs_language + ')' + SubsExtension, ')']

                    else:
                        if os.path.isfile(self.CurrentName + ' (' + subs_language + ')' + SubsExtension):
                            ALLSUBS = ALLSUBS + ['--language', '0:' + language_id, '--forced-track', '0:no', '--default-track', '0:no', '--compression', '0:none', '(', self.CurrentName + ' (' + subs_language + ')' + SubsExtension, ')']

                    if os.path.isfile(self.CurrentName + ' (' + subs_language + '-sdh)' + SubsExtension):
                        ALLSUBS = ALLSUBS + ['--language', '0:' + language_id, '--track-name', '0:' + subs_sdh, '--forced-track', '0:no', '--default-track', '0:no', '--compression', '0:none', '(', self.CurrentName + ' (' + subs_language + '-sdh)' + SubsExtension, ')']

            #(Chapters)
            if os.path.isfile(self.CurrentName+' Chapters.txt'):
                CHAPTERS=['--chapter-charset', 'UTF-8', '--chapters', self.CurrentName + ' Chapters.txt']
            else:
                CHAPTERS=[]


            # Video track always comes first, untagged and default.
            mkvmerge_command_video = [self.mkvmergeexe,
                                      '-q',
                                      '--output',
                                      VideoOutputName,
                                      '--language',
                                      '0:und',
                                      '--default-track',
                                      '0:yes',
                                      '(',
                                      VideoInputName,
                                      ')']


            mkvmerge_command = mkvmerge_command_video + ALLAUDIOS + ALLSUBS + CHAPTERS
            mkvmerge_process = subprocess.run(mkvmerge_command)
|
|
@ -0,0 +1,28 @@
|
|||
astroid>=1.5.3
|
||||
beautifulsoup4>=4.4.1
|
||||
certifi>=2017.4.17
|
||||
chardet>=3.0.3
|
||||
colorama>=0.3.9
|
||||
cssutils>=1.0.2
|
||||
dnspython>=1.15.0
|
||||
future>=0.16.0
|
||||
idna>=2.5
|
||||
isort>=4.2.14
|
||||
lazy-object-proxy>=1.3.1
|
||||
lxml>=4.2.4
|
||||
mccabe>=0.6.1
|
||||
pathvalidate>=0.22.0
|
||||
protobuf>=3.0.0b2
|
||||
pycountry>=18.5.26
|
||||
pycryptodomex>=3.4.6
|
||||
pymsl>=1.2
|
||||
pylint>=1.7.1
|
||||
pysubs2>=0.2.1
|
||||
requests>=2.17.3
|
||||
six>=1.10.0
|
||||
tqdm>=4.14.0
|
||||
unidecode>=1.0.23
|
||||
urllib3>=1.21.1
|
||||
win10toast>=0.9; sys_platform == 'win32'
|
||||
wrapt>=1.10.10
|
||||
xmltodict>=0.11.0
|
Loading…
Reference in New Issue