public inbox for gentoo-commits@lists.gentoo.org
* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2018-05-02  7:21 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2018-05-02  7:21 UTC
  To: gentoo-commits

commit:     650e372ea172f7821d6a055ff26000de387aeb30
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Wed May  2 07:19:06 2018 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Wed May  2 07:19:55 2018 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=650e372e

sci-libs/tensorflow: new package 1.8.0

Package-Manager: Portage-2.3.24, Repoman-2.3.6

 sci-libs/tensorflow/Manifest                       |  42 +++
 ...-modularize-build-script-to-allow-distros.patch | 211 ++++++++++++++
 sci-libs/tensorflow/metadata.xml                   |  15 +
 sci-libs/tensorflow/tensorflow-1.8.0.ebuild        | 302 +++++++++++++++++++++
 4 files changed, 570 insertions(+)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
new file mode 100644
index 00000000000..c605cf57cd4
--- /dev/null
+++ b/sci-libs/tensorflow/Manifest
@@ -0,0 +1,42 @@
+DIST 08039ba8ca59f64248bb3b6ae016460fe9c9914f.tar.gz 433332 BLAKE2B a5a9022c046a8362681cc54e755f1ae1c96a9100be0c1891841ab0326f56f564b4c494ba268573cb5450525e9e35d337ba2505ce63e576ea37da84e7a6554b5d SHA512 9fee59ae3485cbde8adec062af68e9de6be588607bb9280fffd64edbd6ba6f973dd4e158fe229b4228a44d949e68271645f918da720a2a398d1ba30b0b31c4bf
+DIST ARM_NEON_2_x86_SSE-0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz 99753 BLAKE2B 79b0c9e2f3a8de91bb042e55cbc9f589970add4c9bdccd96c9a0f38887418c97aa67fc433b4feffb92a1384039b0d68a7cc6cda141371427a310172422bd41d6 SHA512 a21df040a5b3f67d9be006ae414113fa1ca86d38fa446acfab18afcbbb89ee7c48776fac5565d0694c99f1ff6ead3b10a9915eac7aae4d2a532ef5ac126a0072
+DIST LMDB_0.9.19.tar.gz 143141 BLAKE2B 69f39e2194638989d8a17b79edd7918966d5608f4e611a7446b45e7321adf3b7d4bd69107fe00a1476d32cbd43b83f512c8498cefa3fca7eea8ba615821341fe SHA512 1d9825f09592ad92a540a1dec232cf6649f41cc67b0d59dc8958a71f4090f347c84995f32a166e2c2efecb862981f455814dd07af99bc3489e42fe3fd8bc6191
+DIST abseil_cpp-720c017e30339fd1786ce4aac68bc8559736e53f.tar.gz 529637 BLAKE2B fea7eceda492764613ffe944331e6b08af33d600036e9b2453f4c09e892f22f79a95b00a29a9bdf9e705d11502027d88659e19aa484cf4a02158209b058cec13 SHA512 3ac70cb924518627dfc6ec2eb5f65bc0e1235898eefebb0d0410867270065aa745d21456f1212b916e732b3f5dada18b03e526e02d09a09f445531798219006e
+DIST abseil_py-acec853355ef987eae48a8d87a79351c15dff593.tar.gz 175877 BLAKE2B f3c9b36101321f1e72c4eb2cbf11b42e36dc95bb1682b124b77652d2a2bee912459fa92f40718c80aa22f18611c77d13ad08e2888b20ae8895c115191e2b7397 SHA512 154056acee09b8c193d0bc8c64dd98bab87bfd779b04f81b1da7ace3004fcdf780f3ed0066ca3204850c2b80a6b1185e5e435ed852b73af85a6d899addc1c8bc
+DIST astor-0.6.2.tar.gz 29494 BLAKE2B aec99ed7702da3125f7a8666b0e432ca7f32511451b0922a7c7b2239414674be205f555ba4d1672f3d44f9140c0690d17e0374d339b2bbcd40737d69d748e95a SHA512 a6df0e65f429025caa0fbdd95f119fff94867709c10ade99fdc681a9886e916b67411384bf6ca37ceea478c65add5dc0a74585e17aafc706c8f0363e3c8585a4
+DIST aws_sdk_cpp-1.3.15.tar.gz 14181795 BLAKE2B 8e6d407876c4d68b73e22a21c3863efe1155231835be8edb4d7c0f1cceca1880ba1c6791bc3fda8226a22a201515cd44dbe7020883f21cdbbeb3559c8c733a0e SHA512 844addabf01bdbba2c50f94303f23fd092ac5593c42c782aad5959d5edaadb0bec8a6408a91f4605cef996f0d5a23fd94ca3c194829c65db98291d5fa7150bff
+DIST backports.weakref-1.0rc1.tar.gz 7761 BLAKE2B 4cb2554a4b71bb1f8c41a790268511e4b382effc7e0328f74346d086de539a177111de9e2dabac19a44b2a1cdbf59a7425e30eee4caa4bfe66b0ca97f1f460a9 SHA512 f37e9eb0d9060d2e1588a941f623460bd4477d7e180b38b5cd46d9c1db1ee094ae63e4f5eeeb422823bf3d067f46c4124cb124a9e4ddb795bc0bfbc4802ab826
+DIST boringssl-a0fb951d2a26a8ee746b52f3ba81ab011a0af778.tar.gz 22702000 BLAKE2B d54e52832ea2655590ee6d3c9063f684cd1ac1efacc6e6c60fe3177f799eaae6cf3d6fbb1a616e98514dd0bdc84ee35731bd9e285b5cd7a91a19b9493f694582 SHA512 879a48626fb58263d117f8a8b1c5b69fa99e1f06e71d5321620b16c107011c1f22d3e3443b5ad6a908ca139fcc55bdfc36fb80e884fe4490fc4dd982b668f128
+DIST cub-1.8.0.zip 602396 BLAKE2B a5e302a52e04f85ae8e1836c59e799532a8b961a8d10c08fe1241c9045f88e31f1ebda6d26124b85c6df40968e1c7b3985a39961bf7614535aafcab2da1a576a SHA512 6167c5be94989f88e05a07508cf69cf2c6b22a5ac733804043daa46bd86a44a15a758b1ffb811bab28b82039d027516ed85b3d9f7677a740a7e00ec9c2f81aed
+DIST curl-7.49.1.tar.gz 8885826 BLAKE2B 89bcc9f775bd26ac2f7f38097c8e506a5098f22264aab1d0db9f474bd1161d434264faaf4829882cb9df8817e680d314fb2e1288fcaba3df73b6d569eba2eefe SHA512 eb7cf0d34130b7085f342d2fd44457479e6e9d55e9fc4c46b6c5631d8c792eb376275501a89867b64ef1134084ffc7cd6e91f732a2265c331dc1d405dd65d1a3
+DIST cython-3732784c45cfb040a5b0936951d196f83a12ea17.tar.gz 1796107 BLAKE2B 65cd3ec2e83cfdaec2998091eed6506071a8b28a1bf41d9f59ed8ab05a0559355e831d1c6b550aa3fcc65959e29f6c531c30bf6869a5fd968a351d3f4f8eeef8 SHA512 556377644ebe111a36061768560a06d70484749cc78be0b53a9ad4dd9d40fb7c3aac4ad09af1cf7a5e8274e7289ac540f58c3026522ff5e251ddddaf840ef955
+DIST eigen-6913f0cf7d06.tar.gz 2296773 BLAKE2B f3d4ed1577abc7f722fb600e79bcb1294ae5d6a4499668df05a8b9014bfafb576e6d15d3e311080e6e6ed3afd00e60ccd0c1092161a218d200a32668bef83ea2 SHA512 e1cbd55edfc128ee5c2d0bc08a7fec051efb5713f78e2d323ce49074bf1b86d63e7fcc06c01e469c01d2476dd369e4159ecb306f6d28371a3274402d0ea25e87
+DIST farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz 467122 BLAKE2B 8b9dd426f4b9f732df6c8c09d868d1b1cc006c395b1525c46ea91c75318b723358c8e64bb0d86a73aace2032eded93f0d80cc7c2c77fddd6a39e3402ab4f2cb7 SHA512 7f5110514a8cdc7ce48d33fd94ad67d96a24e42e11b203e4d0986f4190545f051b2a63f8e09f2ac38c3aa4a5c62d9a0e02cd8ce02045037381fb62a99f0dcd00
+DIST fft.tgz 72213 BLAKE2B 4a3ac2b4c0bc3275b5743df59241e1cdbd0200371c153ddf54ef6c7c4ec523ee6560547e2d5ef9f3200037c0635bf41c18991ac35f271b1e600d0dbd65d1a9a7 SHA512 f1ceac00cb7b9eb8f625eee7f1f5eea8af363343589a344226628d68baf668c176e6c23b7f904c4e682330352eaa0cd5d00731340d208e94c9657b8f85ae2240
+DIST flatbuffers-971a68110e4fc1bace10fcb6deeb189e7e1a34ce.tar.gz 613854 BLAKE2B 982142b7dd81791f221d7f3cbbecf6539549c86c436e0cf3ae0c57bd7535b90083c7d2709abc811abcf3b6fee417a16b1d8dcdedcce58fd8715db7ae785a7bff SHA512 01ffc047483edaaccc13bf256d59eef0c4cb7488618f1e908c74e0dcc599f00856d052d209a079d35e385bef973a4c6daf401107c4be85bba46dcc3b4f453b26
+DIST gast-0.2.0.tar.gz 9400 BLAKE2B 1392b9ef3a007d89b6b7af5ca49f3dcaa0689e166845ea25c2e544578f65915114d763b54503825dc2b6958a71ac284ec7ae2cc2cfa861494da28df786d679b4 SHA512 651719c3035d21e02e39cd83fd54212a12c39addf69f7c9e8810ccb7bb9aae7828e176ad0adf55182f0628f18d76cb1e286e498a9665ddb12e91a51dc2303381
+DIST gemmlowp-7c7c744640ddc3d0af18fb245b4d23228813a71b.zip 903072 BLAKE2B 26e49a36f7af265ba4fd720864fd148055e9491d0c0100988b474c41920a0b9d698c634a59280f458941a21af15017966bbb2f65e3b399bbcbf836e1232b9d7e SHA512 b754e9b4fb28884e5fbca56bf0fd59d8bbfa3e01900bf6860a2ee24047b21c71cd076ac27d5cc5b3f44c325fb6fb516529dfdcf720fecdb27e7fef38a4f6da13
+DIST giflib-5.1.4.tar.gz 738552 BLAKE2B 414cfdb488a12bcebc6e6e1db9face0e69430e5af94929e7935dc38ff2355182e223b5d87aef53d405f5f79867b4bc1e756f84134d60a01bcb8b5bf5b6859fa1 SHA512 d9a98a593bcfbbfb2fcbeef1382ca9669a6b85276512e6828c92869d16c7cfe980bbb318766fc176be3c9f7deff7878d2be8aeda1d40af2cc4ac723fe6121b83
+DIST grpc-bd6bdf93279a39a8cd92978fd7c9d14eccd98fc2.tar.gz 13478349 BLAKE2B 81f6722c893332c6be7875f9bd4f8311eca989c6dcaf87e47de038abe43b4d8eef5aaa0326a15a7c4f5c7a922e85bc006ace244ef4004ed78bf91645711adc12 SHA512 9268fc28bc128d2ad944fff4d9e1764a5cf0ab7c4d3f693607f42413cccdb766ec349b8f2e487bef823ac65007da9b9e3fdf91a7f623634616d91a68477b2951
+DIST highwayhash-dfcb97ca4fe9277bf9dc1802dd979b071896453b.tar.gz 121307 BLAKE2B 73402228c12dd0044960db0a2366d63dc6aaa3b8b093bfed7a1f32dfe063499913094beb4d93d67e1926b6854625bafc2f3e279e84e01220b279a3afd1cc3300 SHA512 235ede2ca52e22aa335e31969d2318fd0e413f07492d599bfc0a605ad53090731fe1285be177e2c66b6eb8379e98cbb8389f4d1c7b2bae20d3808bbcf5ecfb51
+DIST jemalloc-4.4.0.tar.gz 444180 BLAKE2B fde0ce34e56deab10e01387aeeb3ec1c5c01a21324c2144a3dfd5c96798aa38fdd4320a91a3b4cdd0ccc956adc107f8db893a1c129a83721ec19f014cea6e638 SHA512 77ee7554e8c4621d1c8ef65402213044718137c513c4c44249929f2df7e89029ea710dec3e67b9068feeb89a87634fa5c2093cd875b8aed98153c7fb5a9d999f
+DIST jsoncpp-11086dd6a7eba04289944367ca82cea71299ed70.tar.gz 164140 BLAKE2B 17a50cbbfa95c84b746a9168670ab755424a93d60b535414d4a744631cf012499b5b0c4b33198c3744210a62bbfe5c3269aee955eb1b055076c0df748a48cb90 SHA512 8b99c55fa76661d3c37b23b3a6035b3bf1f98ef6fe336536e49ac50a92563657066677e32c6cdffff6f04848e86417144236111ce2b37fd5a84ab24754380a18
+DIST libjpeg_turbo-1.5.1.tar.gz 1204374 BLAKE2B 22463c083f1255c059e6376ea104a9eb54a18aba6c66a563dcd1173eb2a271eb1d040eb21cd1d7623d04aa99a3b1eda36de4aed6502d6a8a2d5fba93b3773383 SHA512 5dc11da0d1008dc8e288e9e3242a68c2bc6700b0dcff818d3774815ca255441e8c60eb46df6934e578383b5530e9ebdfebad47c55137ca57a109e8d2ebc66c40
+DIST libpng-v1.6.34.tar.gz 1146972 BLAKE2B 5d9c1e38cf5170b8b86a9b8bde380e735cd765bbac339cbe17269a116d64fa9e9c3b99b04e483f725b6cb1fdf778128278f1670062ecaa2565c91db59d4d2b03 SHA512 23b6112a1d16a34c8037d5c5812944d4385fc96ed819a22172776bdd5acd3a34e55f073b46087b77d1c12cecc68f9e8ba7754c86b5ab6ed3016063e1c795de7a
+DIST librdkafka-v0.11.1.tar.gz 859238 BLAKE2B bc53f358320aa40d1acb209daa2594f18421bb6913bad7ba08a5612dfd6683e65489dc31ed67adc9df090d719ea6ab1031f46e524f055c61cab3f1f941a7163b SHA512 24aba6cfbd5df6a010a735fde455a9272c85b26055b172893b8e5259c2d6a7f2adc9637699f7f3b7270200a280cd95d52779a8f89fbb6af5fc035488ff54310e
+DIST libxsmm-1.8.1.tar.gz 893332 BLAKE2B b09879ef45a8d3007846361aaad8536dac526297583d29f28378a91c05fdd3a4c51e399d7d5b05dd4456268d5fd3e7014d889a41be61c1d99425dd7234b0dd6e SHA512 30388de1edf46ae40b2ccdf224bd9febd1ffbb91f8250c89c03044fe1ea75754ce5155107ec9d027fe2e08963f1ac9d2e526ea84618d2cf50c2d5dfe8a1e074d
+DIST llvm-7e78daafdd22f3f17720a103d29d89590534004e.tar.gz 39754833 BLAKE2B 30fe6c9267b89c9e0f046082af114c9eb9c32baeb13ab1f12b6c9c9ce7e8f83643635879fda4b3a82f5039b1aa6ff63f67546806cc7e6834e8e5399fa1d1f2d6 SHA512 b1ccaa3f39ad38946a2c177e08692719469b0747fbb63afa3054e161d24a61b5223a0c35eb0c5cc226d08416accda34035fa538803da320de205a4eb114b4f02
+DIST mkl_dnn-v0.12.tar.gz 759597 BLAKE2B ad790db940ca5353bc20db9e012356761e5a87d6646eeae983880881e1df271a0a4deb3b6810546fa8cf6f2c35c45509c57630cf8d493d72d557d33864f780f3 SHA512 e6a257679b872d16e320556ca2c1ddb5c04a949ee8b8d6b4de350c46ad4ea79749d5d9f92940c592706719490ad837210335027f782a8df46da1a2029ae5adcc
+DIST nasm-2.12.02.tar.bz2 961257 BLAKE2B f45c35571fa1254ccc01ed59de8a46cb480300404c701b444a7a751d077d23f64a7dbeeeb044d7daed8a6f895b1100f76f2db75ced6417ab56cb83b9494eece1 SHA512 7744a078647861382212de798d200b683cc4044881474fc1931acb14dfa0340ce026d1b3f64ac1ece87878ccd0ddcb69b8c0c850415313f47e01f0332e56a8f8
+DIST nsync-0559ce013feac8db639ee1bf776aca0325d28777.tar.gz 151680 BLAKE2B 280f8800e8674fbab90d15c09ab7e260b3230bfcf0fb44c998955c9bd4aaf07b4a9073a74479c8879811c56863d4c28693e408a4b205e5b5d9c801bb82159216 SHA512 ef61f9f0b12cf1252840966243ef4b4a0cc18afde7eddedb7e3f6ea31fa8a89da5570cfe6be536d721e4b7593249d05bb1c60372f6668635ad8bcde78bfc8dee
+DIST pcre-8.39.tar.gz 2062258 BLAKE2B 1bc2b7b822383ba67b3e9707178fd100a13370a3a4ce8e6527fe0356f7d3e604a9ece11cef5e9e85925fc309698887389e67759d820b8b7a99703cc1cd3ccc88 SHA512 1724b2d6d8575af19fc20a13e50d71cb8024a66458d63cbc006b5bca01230845730535f3446d3850ddd580c93031f38c3ebf547adce0d279dfd18101677403da
+DIST protobuf-396336eb961b75f03b25824fe86cf6490fb75e3a.tar.gz 4803167 BLAKE2B 466db7931c46df5f05018f4ebee0f1061a416b43486dce1fb4953a677f73138c74cbb97f333447862a571bcefe34dd0be3aac18e51b24f9921cb344e4e033af9 SHA512 5af4ebdd8a86a5e50c14c18590e372ea9454cadb101555e00008ebffe448acbaa8c57eeae857a53f20c64c0174ffe0a4d19ba393e2ca53beebad61d5b1db06c1
+DIST re2-26cd968b735e227361c9703683266f01e5df7857.tar.gz 387722 BLAKE2B 122b9f30ec2534877580227fadfce6da6db00e9a3ec29140082580d71e6c4c510d5b6b7400633987d9cb546f88d26726ea6cbc7944aa925272a6dfccfbf728c9 SHA512 cbc81a948c8150416ba437f9fd7248d4e15c03160635d41702c79b981f05092dc052c5c0ee89a9fb7e163dffe148585c652207d9707214488c30bfd3aaaec18d
+DIST six-1.10.0.tar.gz 29630 BLAKE2B 1f380dbc7a9e746b4a7909de54a58a008a73d7b6c582611b2481475d5d9971c936b1742ea0821c65150694914af72f43155caa54616268735a4bd2447d5b4ba4 SHA512 9a53b7bc8f7e8b358c930eaecf91cc5639176a699830153f586780c3e6d637f1bd31349a69c383574f99da19cb3a36524e7733a318f3572b27aefb69c6409c2e
+DIST snappy-1.1.7.tar.gz 1090550 BLAKE2B e389c96e093de5db92b896a7fa17a90d3ad0be07972e279321f24875f0f0c8c704ff478f4902a6356d458f93c249e3e06424d5a4efc9753a23ab12b7ee83a088 SHA512 32046f532606ba545a4e4825c0c66a19be449f2ca2ff760a6fa170a3603731479a7deadb683546e5f8b5033414c50f4a9a29f6d23b7a41f047e566e69eca7caf
+DIST sqlite-amalgamation-3200000.zip 2069628 BLAKE2B 97b641fdd143af8bd1f4b7b733f9823196d09ac1b1c1ea5b093da12592957b66d8be2450a7edf18c54ae83a03e09564d5c7f6b0cb8dd48d4fa8b4f2b153f62a7 SHA512 659b5fe93d526d6121f1530581d3fce5af90c986decf2fab7505fea14dc1ebc9167943261a433207fd70df0019bbcb4faeb5e49e586fc9ba6300439b6e00d1e8
+DIST swig-3.0.8.tar.gz 7937213 BLAKE2B 38f17631a36f178a478bf77f1a7ea77b50b91ed95ca0363f0f75d86bf7da6d855db314c1b7e67634b0e6e1438a0d141d0241595987142154ee356d60955248bc SHA512 85605bd98bf2b56f5bfca23ae23d76d764d76a174b05836c8686825e912d6326c370e9cf2134c0bf4f425560be103b16bf9c9d075077f52e713a69082616e906
+DIST tensorflow-1.8.0.tar.gz 22649439 BLAKE2B 7384c2cf742fb5a8b4e266e95080ae2513c1b112ab97f74839fa8e81bd91bd24645be8afb02e4447ad5fba9f47c4d146f59aa12085937cd3a364ec34c99590f3 SHA512 7280e65d26fb3f15d95f7217ee3bc08d1424cd144cf25bf638fa114fa835b2505dfaf457c55700067d24f485b77120973d094ec568e6d1b1054857402f9c352d
+DIST tensorflow-python-license.txt 45132 BLAKE2B 1f572a06eeb4a58a5563b87a2db381a6e9eb0195cf1d006bda0d3da158e62a1e67e889a7d3d0da83d8609c0d048887bcbbd0d7056fd8e4f56b654047f36936fa SHA512 1b8c2f9733fcf27d560879418c366b5c3e44420d42adb3b857ee741793ddc75ad18324b016909457e5311f2143593392ce4404d12962f076e62f6036afbb521e
+DIST termcolor-1.1.0.tar.gz 3912 BLAKE2B 8d661a708eff02894b6e390befddaef1c9d14f78f0c7d10ac332fa149f7892109f77bd279c9a488a3aa4070569c92f0ce847e3911d419ff549a10555b34534fb SHA512 4bd06bf4405a9ef6c005cd4d159ef602f7fc7fccb3e57586da1187c402f4d0b9051ef930cae423065c51ff4be8a22ceae556a61a6b3c8c519d623c066c340b53
+DIST zlib-1.2.11.tar.gz 607698 BLAKE2B 6bfc4bca5dcadba8a0d4121a2b3ed0bfe440c261003521862c8e6381f1a6f0a72d3fc037351d30afd7ef321e8e8d2ec817c046ac749f2ca0c97fbdc2f7e840b7 SHA512 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff229713bd197d203edfa17c2727700fce65a2a235f07568212d820dca88b528ae

diff --git a/sci-libs/tensorflow/files/0001-pip_package-modularize-build-script-to-allow-distros.patch b/sci-libs/tensorflow/files/0001-pip_package-modularize-build-script-to-allow-distros.patch
new file mode 100644
index 00000000000..ae17b4f4695
--- /dev/null
+++ b/sci-libs/tensorflow/files/0001-pip_package-modularize-build-script-to-allow-distros.patch
@@ -0,0 +1,211 @@
+From 226dbf58ec54af0a44ec73b3a39ec5bee7cc8454 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 1 May 2018 19:55:53 +0800
+Subject: [PATCH] pip_package: modularize build script to allow distros to
+ install more flexibly
+
+Gentoo Linux handles python modules slightly differently and packaging
+wheels is complicated. We prefer to run setup.py ourselves rather than
+build a wheel and then install from there.
+
+This modularizes build_pip_package.sh to allow running parts separately.
+Using --src srcdir prepares the package in a known directory so the
+distro package can take it from there. If only dstdir is given (either
+with --dst or as the only argument, preserving backwards compatibility),
+the behaviour is the same as before: the sources are prepared and the
+wheel is built and placed in dstdir.
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ tensorflow/tools/pip_package/build_pip_package.sh | 141 ++++++++++++++++------
+ 1 file changed, 105 insertions(+), 36 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
+index 3af79ee170..135846519c 100755
+--- a/tensorflow/tools/pip_package/build_pip_package.sh
++++ b/tensorflow/tools/pip_package/build_pip_package.sh
+@@ -41,42 +41,15 @@ function is_windows() {
+   fi
+ }
+ 
+-function main() {
++function prepare_src() {
+   if [ $# -lt 1 ] ; then
+     echo "No destination dir provided"
+     exit 1
+   fi
+ 
+-  DEST=$(real_path $1)
+-  TMPDIR=$(mktemp -d -t tmp.XXXXXXXXXX)
+-
+-  PKG_NAME_FLAG=""
+-  GPU_BUILD=0
+-  NIGHTLY_BUILD=0
+-  while true; do
+-    if [[ "$1" == "--nightly_flag" ]]; then
+-      NIGHTLY_BUILD=1
+-    elif [[ "$1" == "--gpu" ]]; then
+-      GPU_BUILD=1
+-    elif [[ "$1" == "--gpudirect" ]]; then
+-      PKG_NAME_FLAG="--project_name tensorflow_gpudirect"
+-    fi
+-    shift
+-
+-    if [[ -z "$1" ]]; then
+-      break
+-    fi
+-  done
+-
+-  if [[ ${NIGHTLY_BUILD} == "1" && ${GPU_BUILD} == "1" ]]; then
+-    PKG_NAME_FLAG="--project_name tf_nightly_gpu"
+-  elif [[ ${NIGHTLY_BUILD} == "1" ]]; then
+-    PKG_NAME_FLAG="--project_name tf_nightly"
+-  elif [[ ${GPU_BUILD} == "1" ]]; then
+-    PKG_NAME_FLAG="--project_name tensorflow_gpu"
+-  fi
+-
+-  echo $(date) : "=== Using tmpdir: ${TMPDIR}"
++  TMPDIR="$1"
++  mkdir -p "$TMPDIR"
++  echo $(date) : "=== Preparing sources in dir: ${TMPDIR}"
+ 
+   if [ ! -d bazel-bin/tensorflow ]; then
+     echo "Could not find bazel-bin.  Did you run from the root of the build tree?"
+@@ -148,17 +121,28 @@ function main() {
+   # over so user defined ops can be compiled.
+   mkdir -p ${TMPDIR}/google
+   mkdir -p ${TMPDIR}/third_party
+-  pushd ${RUNFILES%org_tensorflow}
++  pushd ${RUNFILES%org_tensorflow} > /dev/null
+   for header in $(find protobuf_archive -name \*.h); do
+     mkdir -p "${TMPDIR}/google/$(dirname ${header})"
+     cp "$header" "${TMPDIR}/google/$(dirname ${header})/"
+   done
+-  popd
++  popd > /dev/null
+   cp -R $RUNFILES/third_party/eigen3 ${TMPDIR}/third_party
+ 
+   cp tensorflow/tools/pip_package/MANIFEST.in ${TMPDIR}
+   cp tensorflow/tools/pip_package/README ${TMPDIR}
+   cp tensorflow/tools/pip_package/setup.py ${TMPDIR}
++}
++
++function build_wheel() {
++  if [ $# -lt 2 ] ; then
++    echo "No src and dest dir provided"
++    exit 1
++  fi
++
++  TMPDIR="$1"
++  DEST="$2"
++  PKG_NAME_FLAG="$3"
+ 
+   # Before we leave the top-level directory, make sure we know how to
+   # call python.
+@@ -166,15 +150,100 @@ function main() {
+     source tools/python_bin_path.sh
+   fi
+ 
+-  pushd ${TMPDIR}
++  pushd ${TMPDIR} > /dev/null
+   rm -f MANIFEST
+   echo $(date) : "=== Building wheel"
+   "${PYTHON_BIN_PATH:-python}" setup.py bdist_wheel ${PKG_NAME_FLAG} >/dev/null
+   mkdir -p ${DEST}
+   cp dist/* ${DEST}
+-  popd
+-  rm -rf ${TMPDIR}
++  popd > /dev/null
+   echo $(date) : "=== Output wheel file is in: ${DEST}"
+ }
+ 
++function usage() {
++  echo "Usage:"
++  echo "$0 [--src srcdir] [--dst dstdir] [options]"
++  echo "$0 dstdir [options]"
++  echo ""
++  echo "    --src          prepare sources in srcdir"
++  echo "                       will use temporary dir if not specified"
++  echo ""
++  echo "    --dst          build wheel in dstdir"
++  echo "                       if dstdir is not set do not build, only prepare sources"
++  echo ""
++  echo "  Options:"
++  echo "    --gpu          build tensorflow_gpu"
++  echo "    --gpudirect    build tensorflow_gpudirect"
++  echo "    --nightly_flag build tensorflow nightly"
++  echo ""
++  exit 1
++}
++
++function main() {
++  PKG_NAME_FLAG=""
++  GPU_BUILD=0
++  NIGHTLY_BUILD=0
++  SRCDIR=""
++  DSTDIR=""
++  CLEANSRC=1
++  while true; do
++    if [[ "$1" == "--help" ]]; then
++      usage
++      exit 1
++    elif [[ "$1" == "--nightly_flag" ]]; then
++      NIGHTLY_BUILD=1
++    elif [[ "$1" == "--gpu" ]]; then
++      GPU_BUILD=1
++    elif [[ "$1" == "--gpudirect" ]]; then
++      PKG_NAME_FLAG="--project_name tensorflow_gpudirect"
++    elif [[ "$1" == "--src" ]]; then
++      shift
++      SRCDIR="$(real_path $1)"
++      CLEANSRC=0
++    elif [[ "$1" == "--dst" ]]; then
++      shift
++      DSTDIR="$(real_path $1)"
++    else
++      DSTDIR="$(real_path $1)"
++    fi
++    shift
++
++    if [[ -z "$1" ]]; then
++      break
++    fi
++  done
++
++  if [[ -z "$DSTDIR" ]] && [[ -z "$SRCDIR" ]]; then
++    echo "No destination dir provided"
++    usage
++    exit 1
++  fi
++
++  if [[ -z "$SRCDIR" ]]; then
++    # make temp srcdir if none set
++    SRCDIR="$(mktemp -d -t tmp.XXXXXXXXXX)"
++  fi
++
++  prepare_src "$SRCDIR"
++
++  if [[ -z "$DSTDIR" ]]; then
++      # only want to prepare sources
++      exit
++  fi
++
++  if [[ ${NIGHTLY_BUILD} == "1" && ${GPU_BUILD} == "1" ]]; then
++    PKG_NAME_FLAG="--project_name tf_nightly_gpu"
++  elif [[ ${NIGHTLY_BUILD} == "1" ]]; then
++    PKG_NAME_FLAG="--project_name tf_nightly"
++  elif [[ ${GPU_BUILD} == "1" ]]; then
++    PKG_NAME_FLAG="--project_name tensorflow_gpu"
++  fi
++
++  build_wheel "$SRCDIR" "$DSTDIR" "$PKG_NAME_FLAG"
++
++  if [[ $CLEANSRC -ne 0 ]]; then
++    rm -rf ${TMPDIR}
++  fi
++}
++
+ main "$@"
+-- 
+2.16.1
+
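
For illustration, the modularized script can be driven in two ways, matching the usage() text above (the paths here are hypothetical; the ebuild later in this commit calls the bazel-built wrapper the same way):

    # prepare the sources only, into a known directory, for the distro package to pick up
    bazel-bin/tensorflow/tools/pip_package/build_pip_package --src /tmp/tf-src
    # or prepare sources in a temporary dir and build a wheel into the destination, as before
    bazel-bin/tensorflow/tools/pip_package/build_pip_package --dst /tmp/tf-wheel --gpu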

diff --git a/sci-libs/tensorflow/metadata.xml b/sci-libs/tensorflow/metadata.xml
new file mode 100644
index 00000000000..e7cdd82763c
--- /dev/null
+++ b/sci-libs/tensorflow/metadata.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+	<maintainer type="person">
+		<email>perfinion@gentoo.org</email>
+	</maintainer>
+	<longdescription lang="en">
+		An open source machine learning framework for everyone
+	</longdescription>
+	<use>
+		<flag name="cuda">Enable support for nVidia CUDA</flag>
+		<flag name="mpi">Enable support for MPI</flag>
+		<flag name="jemalloc">Enable support for <pkg>dev-libs/jemalloc</pkg></flag>
+	</use>
+</pkgmetadata>

diff --git a/sci-libs/tensorflow/tensorflow-1.8.0.ebuild b/sci-libs/tensorflow/tensorflow-1.8.0.ebuild
new file mode 100644
index 00000000000..ae227d94b3f
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-1.8.0.ebuild
@@ -0,0 +1,302 @@
+# Copyright 1999-2018 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=6
+
+PYTHON_COMPAT=( python3_{5,6} )
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+
+inherit distutils-r1 multiprocessing toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda jemalloc mpi"
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+	IUSE+=" cpu_flags_x86_$i"
+done
+
+# distfiles that bazel uses for the workspace; they will be copied to bazel-distdir
+bazel_external_uris="https://github.com/abseil/abseil-py/archive/acec853355ef987eae48a8d87a79351c15dff593.tar.gz -> abseil_py-acec853355ef987eae48a8d87a79351c15dff593.tar.gz
+	http://ftp.exim.org/pub/pcre/pcre-8.39.tar.gz
+	http://pilotfiber.dl.sourceforge.net/project/giflib/giflib-5.1.4.tar.gz
+	http://pkgs.fedoraproject.org/repo/pkgs/nasm/nasm-2.12.02.tar.bz2/d15843c3fb7db39af80571ee27ec6fad/nasm-2.12.02.tar.bz2
+	http://ufpr.dl.sourceforge.net/project/swig/swig/swig-3.0.8/swig-3.0.8.tar.gz
+	http://www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz
+	http://www.sqlite.org/2017/sqlite-amalgamation-3200000.zip
+	https://bitbucket.org/eigen/eigen/get/6913f0cf7d06.tar.gz -> eigen-6913f0cf7d06.tar.gz
+	https://curl.haxx.se/download/curl-7.49.1.tar.gz
+	https://github.com/LMDB/lmdb/archive/LMDB_0.9.19.tar.gz
+	https://github.com/NVlabs/cub/archive/1.8.0.zip -> cub-1.8.0.zip
+	https://github.com/abseil/abseil-cpp/archive/720c017e30339fd1786ce4aac68bc8559736e53f.tar.gz -> abseil_cpp-720c017e30339fd1786ce4aac68bc8559736e53f.tar.gz
+	https://github.com/aws/aws-sdk-cpp/archive/1.3.15.tar.gz -> aws_sdk_cpp-1.3.15.tar.gz
+	https://github.com/bazelbuild/rules_closure/archive/08039ba8ca59f64248bb3b6ae016460fe9c9914f.tar.gz
+	https://github.com/cython/cython/archive/3732784c45cfb040a5b0936951d196f83a12ea17.tar.gz -> cython-3732784c45cfb040a5b0936951d196f83a12ea17.tar.gz
+	https://github.com/edenhill/librdkafka/archive/v0.11.1.tar.gz -> librdkafka-v0.11.1.tar.gz
+	https://github.com/glennrp/libpng/archive/v1.6.34.tar.gz -> libpng-v1.6.34.tar.gz
+	https://github.com/google/boringssl/archive/a0fb951d2a26a8ee746b52f3ba81ab011a0af778.tar.gz -> boringssl-a0fb951d2a26a8ee746b52f3ba81ab011a0af778.tar.gz
+	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
+	https://github.com/google/flatbuffers/archive/971a68110e4fc1bace10fcb6deeb189e7e1a34ce.tar.gz -> flatbuffers-971a68110e4fc1bace10fcb6deeb189e7e1a34ce.tar.gz
+	https://github.com/google/gemmlowp/archive/7c7c744640ddc3d0af18fb245b4d23228813a71b.zip -> gemmlowp-7c7c744640ddc3d0af18fb245b4d23228813a71b.zip
+	https://github.com/google/highwayhash/archive/dfcb97ca4fe9277bf9dc1802dd979b071896453b.tar.gz -> highwayhash-dfcb97ca4fe9277bf9dc1802dd979b071896453b.tar.gz
+	https://github.com/google/nsync/archive/0559ce013feac8db639ee1bf776aca0325d28777.tar.gz -> nsync-0559ce013feac8db639ee1bf776aca0325d28777.tar.gz
+	https://github.com/google/protobuf/archive/396336eb961b75f03b25824fe86cf6490fb75e3a.tar.gz -> protobuf-396336eb961b75f03b25824fe86cf6490fb75e3a.tar.gz
+	https://github.com/google/re2/archive/26cd968b735e227361c9703683266f01e5df7857.tar.gz -> re2-26cd968b735e227361c9703683266f01e5df7857.tar.gz
+	https://github.com/google/snappy/archive/1.1.7.tar.gz -> snappy-1.1.7.tar.gz
+	https://github.com/grpc/grpc/archive/bd6bdf93279a39a8cd92978fd7c9d14eccd98fc2.tar.gz -> grpc-bd6bdf93279a39a8cd92978fd7c9d14eccd98fc2.tar.gz
+	https://github.com/hfp/libxsmm/archive/1.8.1.tar.gz -> libxsmm-1.8.1.tar.gz
+	https://github.com/intel/ARM_NEON_2_x86_SSE/archive/0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz -> ARM_NEON_2_x86_SSE-0f77d9d182265259b135dad949230ecbf1a2633d.tar.gz
+	https://github.com/intel/mkl-dnn/archive/v0.12.tar.gz -> mkl_dnn-v0.12.tar.gz
+	https://github.com/jemalloc/jemalloc/archive/4.4.0.tar.gz -> jemalloc-4.4.0.tar.gz
+	https://github.com/libjpeg-turbo/libjpeg-turbo/archive/1.5.1.tar.gz -> libjpeg_turbo-1.5.1.tar.gz
+	https://github.com/llvm-mirror/llvm/archive/7e78daafdd22f3f17720a103d29d89590534004e.tar.gz -> llvm-7e78daafdd22f3f17720a103d29d89590534004e.tar.gz
+	https://github.com/open-source-parsers/jsoncpp/archive/11086dd6a7eba04289944367ca82cea71299ed70.tar.gz -> jsoncpp-11086dd6a7eba04289944367ca82cea71299ed70.tar.gz
+	https://mirror.bazel.build/docs.python.org/2.7/_sources/license.txt -> tensorflow-python-license.txt
+	https://pypi.python.org/packages/5c/78/ff794fcae2ce8aa6323e789d1f8b3b7765f601e7702726f430e814822b96/gast-0.2.0.tar.gz
+	https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz
+	https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
+	https://pypi.python.org/packages/d8/be/c4276b3199ec3feee2a88bc64810fbea8f26d961e0a4cd9c68387a9f35de/astor-0.6.2.tar.gz
+	https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz
+	https://zlib.net/zlib-1.2.11.tar.gz
+"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+		${bazel_external_uris}"
+
+RDEPEND="
+	app-arch/snappy
+	dev-db/lmdb
+	dev-db/sqlite
+	dev-libs/libpcre
+	dev-libs/protobuf
+	dev-libs/protobuf-c
+	dev-libs/re2
+	dev-python/absl-py[${PYTHON_USEDEP}]
+	dev-python/numpy[${PYTHON_USEDEP}]
+	dev-python/protobuf-python[${PYTHON_USEDEP}]
+	dev-python/six[${PYTHON_USEDEP}]
+	dev-python/termcolor[${PYTHON_USEDEP}]
+	media-libs/giflib
+	media-libs/libpng:0
+	net-libs/grpc[${PYTHON_USEDEP}]
+	net-misc/curl
+	sys-libs/zlib
+	virtual/jpeg:0
+	cuda? ( >=dev-util/nvidia-cuda-toolkit-8.0.61[profiler] >=dev-libs/cudnn-6.0 )
+	jemalloc? ( >=dev-libs/jemalloc-4.4.0 )
+	mpi? ( virtual/mpi )"
+DEPEND="${RDEPEND}
+	>=dev-util/bazel-0.13.0
+	dev-java/java-config
+	dev-lang/nasm
+	dev-lang/swig"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+PATCHES=(
+	"${FILESDIR}/0001-pip_package-modularize-build-script-to-allow-distros.patch"
+)
+
+setup_bazelrc() {
+	if [[ -f "${T}/bazelrc" ]]; then
+		return
+	fi
+
+	# F: fopen_wr
+	# P: /proc/self/setgroups
+	# Even with standalone enabled, the Bazel sandbox binary is run for feature test:
+	# https://github.com/bazelbuild/bazel/blob/7b091c1397a82258e26ab5336df6c8dae1d97384/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java#L61
+	# https://github.com/bazelbuild/bazel/blob/76555482873ffcf1d32fb40106f89231b37f850a/src/main/tools/linux-sandbox-pid1.cc#L113
+	addpredict /proc
+
+	echo "startup --batch" > "${T}/bazelrc" || die
+
+	# make bazel respect $MAKEOPTS
+	echo "build --jobs=$(makeopts_jobs)" >> "${T}/bazelrc" || die
+
+	# Use standalone strategy to deactivate the bazel sandbox, since it
+	# conflicts with FEATURES=sandbox.
+	echo "build --verbose_failures --spawn_strategy=standalone --genrule_strategy=standalone" >> "${T}/bazelrc" || die
+	echo "build --noshow_loading_progress" >> "${T}/bazelrc" || die
+	echo "test --verbose_failures --spawn_strategy=standalone --genrule_strategy=standalone" >> "${T}/bazelrc" || die
+	echo "test --verbose_test_summary --noshow_loading_progress" >> "${T}/bazelrc" || die
+
+	# make bazel only fetch distfiles from the cache
+	mkdir -p "${T}/bazel-cache" || die
+	mkdir -p "${T}/bazel-distdir" || die
+	echo "fetch --repository_cache=${T}/bazel-cache/ --experimental_distdir=${T}/bazel-distdir/" >> "${T}/bazelrc" || die
+	echo "build --repository_cache=${T}/bazel-cache/ --experimental_distdir=${T}/bazel-distdir/" >> "${T}/bazelrc" || die
+}
+
+bazel_multibuild_wrapper() {
+	BAZEL_OUTPUT_BASE="${WORKDIR}/bazel-base-${MULTIBUILD_VARIANT}"
+	mkdir -p "${BAZEL_OUTPUT_BASE}" || die
+
+	run_in_build_dir $@
+}
+
+ebazel() {
+	setup_bazelrc
+
+	echo Running: bazel --bazelrc="${T}/bazelrc" --output_base="${BAZEL_OUTPUT_BASE}" "$@"
+	bazel --bazelrc="${T}/bazelrc" --output_base="${BAZEL_OUTPUT_BASE}" $@ || die
+}
+
+load_distfiles() {
+	# populate the bazel distdir to fetch from since it cannot use the network
+	local s d uri rename
+
+	while read uri rename d; do
+		[[ -z "$uri" ]] && continue
+		if [[ "$rename" == "->" ]]; then
+			s="${uri##*/}"
+			einfo "Copying $d to bazel distdir $s ..."
+		else
+			s="${uri##*/}"
+			d="${s}"
+			einfo "Copying $d to bazel distdir ..."
+		fi
+		cp "${DISTDIR}/${d}" "${T}/bazel-distdir/${s}" || die
+	done <<< "${bazel_external_uris}"
+}
+
+pkg_setup() {
+	export JAVA_HOME=$(java-config --jre-home)
+}
+
+src_unpack() {
+	# only unpack the main distfile
+	unpack "${P}.tar.gz"
+}
+
+src_prepare() {
+	BAZEL_OUTPUT_BASE="${WORKDIR}/bazel-base"
+	mkdir -p "${BAZEL_OUTPUT_BASE}" || die
+	setup_bazelrc
+	load_distfiles
+
+	default
+	python_copy_sources
+}
+
+src_configure() {
+	do_configure() {
+		local cc_opt_flags=( ${CFLAGS} )
+
+		# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+		for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+			use cpu_flags_x86_${i} && cc_opt_flags+=( -m${i/_/.} )
+		done
+		use cpu_flags_x86_fma3 && cc_opt_flags+=( -mfma )
+
+		python_export PYTHON_SITEDIR
+		export CC_OPT_FLAGS="${cc_opt_flags[*]}"
+		export GCC_HOST_COMPILER_PATH=$(tc-getCC)
+		export TF_NEED_JEMALLOC=$(usex jemalloc 1 0)
+		export TF_NEED_GCP=0
+		export TF_NEED_HDFS=0
+		export TF_NEED_S3=0
+		export TF_NEED_KAFKA=0
+		export TF_ENABLE_XLA=0
+		export TF_NEED_GDR=0
+		export TF_NEED_VERBS=0
+		export TF_NEED_OPENCL_SYCL=0
+		export TF_NEED_OPENCL=0
+		export TF_NEED_COMPUTECPP=0
+		export TF_NEED_MKL=0
+		export TF_NEED_MPI=$(usex mpi 1 0)
+		export TF_DOWNLOAD_CLANG=0
+		export TF_NEED_CUDA=$(usex cuda 1 0)
+		export TF_SET_ANDROID_WORKSPACE=0
+		export PYTHON_BIN_PATH="${PYTHON}"
+		export PYTHON_LIB_PATH="${PYTHON_SITEDIR}"
+
+		# this is not autoconf
+		./configure || die
+	}
+	python_foreach_impl bazel_multibuild_wrapper do_configure
+}
+
+src_compile() {
+	python_setup
+	local MULTIBUILD_VARIANT="${EPYTHON/./_}"
+	cd "${S}-${MULTIBUILD_VARIANT}" || die
+	BAZEL_OUTPUT_BASE="${WORKDIR}/bazel-base-${MULTIBUILD_VARIANT}"
+
+	ebazel build \
+		--config=opt $(usex cuda --config=cuda '') \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so \
+		//tensorflow:libtensorflow_cc.so
+
+	do_compile() {
+		cd "${S}-${MULTIBUILD_VARIANT}" || die
+		ebazel build \
+			--config=opt $(usex cuda --config=cuda '') \
+			//tensorflow/tools/pip_package:build_pip_package
+	}
+	python_foreach_impl bazel_multibuild_wrapper do_compile
+}
+
+src_install() {
+	do_install() {
+		einfo "Installing ${EPYTHON} files"
+		local srcdir="${T}/src-${EPYTHON/./_}"
+		mkdir -p "${srcdir}" || die
+		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+		cd "${srcdir}" || die
+		esetup.py install
+
+		# it installs site-packages/external, but it shouldn't
+		python_export PYTHON_SITEDIR
+		rm -rf "${D}/${PYTHON_SITEDIR}/external" || die
+		sed -i '/^external/d' "${D}/${PYTHON_SITEDIR}"/${P}-*.egg-info/{SOURCES,top_level}.txt || die
+
+		# symlink to the main .so file
+		rm -rf "${D}/${PYTHON_SITEDIR}/${PN}/lib${PN}_framework.so" || die
+		dosym "../../../lib${PN}_framework.so" "${PYTHON_SITEDIR}/${PN}/lib${PN}_framework.so" || die
+
+		python_optimize
+	}
+	python_foreach_impl bazel_multibuild_wrapper do_install
+
+	# symlink to python-exec scripts
+	for i in "${D}"/usr/lib/python-exec/*/*; do
+		n="${i##*/}"
+		[[ -e "${D}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/$n"
+	done
+
+	python_setup
+	local MULTIBUILD_VARIANT="${EPYTHON/./_}"
+	cd "${S}-${MULTIBUILD_VARIANT}" || die
+	BAZEL_OUTPUT_BASE="${WORKDIR}/bazel-base-${MULTIBUILD_VARIANT}"
+
+	einfo "Installing headers"
+	# install c c++ and core header files
+	for i in $(find ${PN}/{c,cc,core} -name "*.h"); do
+		insinto /usr/include/${PN}/${i%/*}
+		doins ${i}
+	done
+
+	# eigen headers
+	insinto /usr/include/${PN}/third_party/eigen3/Eigen/
+	doins third_party/eigen3/Eigen/*
+
+	einfo "Installing libs"
+	# generate pkg-config file
+	${PN}/c/generate-pc.sh --prefix=/usr --version=${MY_PV} || die
+	insinto /usr/$(get_libdir)/pkgconfig
+	doins ${PN}.pc
+
+	dolib.so bazel-bin/tensorflow/lib${PN}_framework.so
+	dolib.so bazel-bin/tensorflow/lib${PN}.so
+	dolib.so bazel-bin/tensorflow/lib${PN}_cc.so
+
+	einstalldocs
+}
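
As a worked example of the load_distfiles loop above: for the entry "https://github.com/aws/aws-sdk-cpp/archive/1.3.15.tar.gz -> aws_sdk_cpp-1.3.15.tar.gz", read sets uri to the URL, rename to "->" and d to the Portage distfile name, while s becomes the basename bazel derives from the URL, so the copy performed is effectively:

    cp "${DISTDIR}/aws_sdk_cpp-1.3.15.tar.gz" "${T}/bazel-distdir/1.3.15.tar.gz"

For entries without a rename, the distfile name and the URL basename are identical and the file is copied under its own name.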



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2018-05-20 13:46 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2018-05-20 13:46 UTC
  To: gentoo-commits

commit:     7379cdb578b0c070c846c3fa9f71470e2c5d1320
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Sat May 19 10:21:51 2018 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Sun May 20 13:45:52 2018 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=7379cdb5

sci-libs/tensorflow: bazel build fixes

Properly include bazelrc: only one is read, so import our generated one
instead.
Don't sanitize $HOME, otherwise .keras ends up in /var/tmp/portage/
(bug 655368).
Patch the BUILD file so it doesn't strip the outputs.

Closes: https://bugs.gentoo.org/655368
Package-Manager: Portage-2.3.24, Repoman-2.3.6

 .../files/tensorflow-1.8.0-0002-dont-strip.patch   | 29 +++++++++++++++
 ...low-1.8.0.ebuild => tensorflow-1.8.0-r1.ebuild} | 42 ++++++++++++++--------
 2 files changed, 56 insertions(+), 15 deletions(-)

diff --git a/sci-libs/tensorflow/files/tensorflow-1.8.0-0002-dont-strip.patch b/sci-libs/tensorflow/files/tensorflow-1.8.0-0002-dont-strip.patch
new file mode 100644
index 00000000000..9f828f3c8ef
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-1.8.0-0002-dont-strip.patch
@@ -0,0 +1,29 @@
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index cfafffdd13..8ed22ccfea 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -470,7 +470,7 @@ tf_cc_shared_object(
+ # excludes all but a subset of function names.
+ # On MacOS, the linker does not support version_script, but has an
+ # an "-exported_symbols_list" command.  -z defs disallows undefined
+-# symbols in object files and -s strips the output.
++# symbols in object files.
+ 
+ tf_cc_shared_object(
+     name = "libtensorflow.so",
+@@ -484,7 +484,6 @@ tf_cc_shared_object(
+         "//tensorflow:windows_msvc": [],
+         "//conditions:default": [
+             "-z defs",
+-            "-s",
+             "-Wl,--version-script",  #  This line must be directly followed by the version_script.lds file
+             "$(location //tensorflow/c:version_script.lds)",
+         ],
+@@ -510,7 +509,6 @@ tf_cc_shared_object(
+         "//tensorflow:windows_msvc": [],
+         "//conditions:default": [
+             "-z defs",
+-            "-s",
+             "-Wl,--version-script",  #  This line must be directly followed by the version_script.lds file
+             "$(location //tensorflow:tf_version_script.lds)",
+         ],

diff --git a/sci-libs/tensorflow/tensorflow-1.8.0.ebuild b/sci-libs/tensorflow/tensorflow-1.8.0-r1.ebuild
similarity index 91%
rename from sci-libs/tensorflow/tensorflow-1.8.0.ebuild
rename to sci-libs/tensorflow/tensorflow-1.8.0-r1.ebuild
index c12a3e04ae0..98643e5d2ce 100644
--- a/sci-libs/tensorflow/tensorflow-1.8.0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-1.8.0-r1.ebuild
@@ -3,7 +3,7 @@
 
 EAPI=6
 
-PYTHON_COMPAT=( python3_{5,6} )
+PYTHON_COMPAT=( python2_7 python3_{5,6} )
 MY_PV=${PV/_rc/-rc}
 MY_P=${PN}-${MY_PV}
 
@@ -101,6 +101,7 @@ S="${WORKDIR}/${MY_P}"
 DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
 PATCHES=(
 	"${FILESDIR}/0001-pip_package-modularize-build-script-to-allow-distros.patch"
+	"${FILESDIR}/tensorflow-1.8.0-0002-dont-strip.patch"
 )
 
 bazel-get-cpu-flags() {
@@ -140,24 +141,32 @@ setup_bazelrc() {
 	# https://github.com/bazelbuild/bazel/blob/76555482873ffcf1d32fb40106f89231b37f850a/src/main/tools/linux-sandbox-pid1.cc#L113
 	addpredict /proc
 
-	echo "startup --batch" > "${T}/bazelrc" || die
+	mkdir -p "${T}/bazel-cache" || die
+	mkdir -p "${T}/bazel-distdir" || die
+
+	cat > "${T}/bazelrc" <<-EOF
+	startup --batch
+
+	# don't strip HOME, portage sets a temp per-package dir
+	build --action_env HOME
 
-	# make bazel respect $MAKEOPTS
-	echo "build --jobs=$(makeopts_jobs) $(bazel-get-flags)" >> "${T}/bazelrc" || die
-	echo "build --compilation_mode=opt --host_compilation_mode=opt" >> "${T}/bazelrc" || die
+	# make bazel respect MAKEOPTS
+	build --jobs=$(makeopts_jobs) $(bazel-get-flags)
+	build --compilation_mode=opt --host_compilation_mode=opt
 
 	# Use standalone strategy to deactivate the bazel sandbox, since it
 	# conflicts with FEATURES=sandbox.
-	echo "build --verbose_failures --spawn_strategy=standalone --genrule_strategy=standalone" >> "${T}/bazelrc" || die
-	echo "build --noshow_loading_progress" >> "${T}/bazelrc" || die
-	echo "test --verbose_failures --spawn_strategy=standalone --genrule_strategy=standalone" >> "${T}/bazelrc" || die
-	echo "test --verbose_test_summary --noshow_loading_progress" >> "${T}/bazelrc" || die
+	build --spawn_strategy=standalone --genrule_strategy=standalone
+	test --spawn_strategy=standalone --genrule_strategy=standalone
+
+	build --strip=never
+	build --verbose_failures --noshow_loading_progress
+	test --verbose_test_summary --verbose_failures --noshow_loading_progress
 
 	# make bazel only fetch distfiles from the cache
-	mkdir -p "${T}/bazel-cache" || die
-	mkdir -p "${T}/bazel-distdir" || die
-	echo "fetch --repository_cache=${T}/bazel-cache/ --experimental_distdir=${T}/bazel-distdir/" >> "${T}/bazelrc" || die
-	echo "build --repository_cache=${T}/bazel-cache/ --experimental_distdir=${T}/bazel-distdir/" >> "${T}/bazelrc" || die
+	fetch --repository_cache=${T}/bazel-cache/ --experimental_distdir=${T}/bazel-distdir/
+	build --repository_cache=${T}/bazel-cache/ --experimental_distdir=${T}/bazel-distdir/
+	EOF
 }
 
 bazel_multibuild_wrapper() {
@@ -170,8 +179,8 @@ bazel_multibuild_wrapper() {
 ebazel() {
 	setup_bazelrc
 
-	echo Running: bazel --bazelrc="${T}/bazelrc" --output_base="${BAZEL_OUTPUT_BASE}" "$@"
-	bazel --bazelrc="${T}/bazelrc" --output_base="${BAZEL_OUTPUT_BASE}" $@ || die
+	echo Running: bazel --output_base="${BAZEL_OUTPUT_BASE}" "$@"
+	bazel --output_base="${BAZEL_OUTPUT_BASE}" $@ || die
 }
 
 load_distfiles() {
@@ -235,6 +244,9 @@ src_configure() {
 		export PYTHON_BIN_PATH="${PYTHON}"
 		export PYTHON_LIB_PATH="${PYTHON_SITEDIR}"
 
+		# only one bazelrc is read, import our one before configure sets its options
+		echo "import ${T}/bazelrc" >> ./.bazelrc
+
 		# this is not autoconf
 		./configure || die
 	}
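
Only one bazelrc is read by bazel, so the hunk above chains the Portage-generated options file into the source tree's .bazelrc before ./configure appends its own settings; with ${T} expanded, the appended line looks roughly like this (the temp path is illustrative):

    import /var/tmp/portage/sci-libs/tensorflow-1.8.0-r1/temp/bazelrc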



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2019-06-19  8:41 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2019-06-19  8:41 UTC
  To: gentoo-commits

commit:     9d23ef3049aa6719ec03b3bcadff19d4772d28ee
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Wed Jun 19 08:37:41 2019 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Wed Jun 19 08:38:31 2019 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=9d23ef30

sci-libs/tensorflow: bump 1.14.0

Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>
Package-Manager: Portage-2.3.66, Repoman-2.3.11

 sci-libs/tensorflow/Manifest                                   |  2 +-
 ...=> tensorflow-1.14.0-0001-systemlibs-unbundle-enum34.patch} |  2 +-
 .../{tensorflow-1.14.0_rc0.ebuild => tensorflow-1.14.0.ebuild} | 10 ++++++++--
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 2d9fb90edac..3ec2a37712a 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -22,5 +22,5 @@ DIST rules_docker-a9bb1dab84cdf46e34d1b34b53a17bda129b5eba.tar.gz 446437 BLAKE2B
 DIST rules_docker-b8ff6a85ec359db3fd5657accd3e524daf12016d.tar.gz 460121 BLAKE2B bca31bbdd9069647fa9560002e63a404e2bd4d4ebd19d90e28d08f797f8f63157cc127dab26b996bb976e50f099928a91c1d6343427cb85048c6ca2b47c0c7e2 SHA512 461a663189e5995e31ee8175ee260e1ad06b00ee8a7548b9d3a946a2de693311031ec6eba89e3bad527f5d8f76ed25626ef4ea05a8d6e7579932dc1be3b0a6ba
 DIST tensorflow-1.13.1.tar.gz 31614228 BLAKE2B 3e78309744747afd5a0929eebccb414efe51320f9291424ef6e3cb848f265aaac5ff16a7064c7aac8094f06b1edb61aa9fe22a55c28c23cfc4e2a6f640f4f12d SHA512 1e5c020a82b0b502c9c6fc9439905e19e8f1fb07e32740a5e2c70f4a3565c8c87053ccdcad360e95550dad648154e399ca8e352fb97a1788acfec3b87bf323bb
 DIST tensorflow-1.14.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
-DIST tensorflow-1.14.0_rc0.tar.gz 41329600 BLAKE2B 97c85ad9b7cb6e3bdc54fa176070106853ab4f3c30e93220e028627edf55deda7a7d675d42e8ea1c2d030ed1bf3e697e9016ef9ce629d3a98055c4e88c0db9ba SHA512 27be21cdef1bde50c58fffb28ce0b9dd142e2c24506c5042da2b334d46fbf9db06a420450716d93a5806eab8feca3cd03ae7330d3444af07ad561fe0fffd681f
+DIST tensorflow-1.14.0.tar.gz 41335204 BLAKE2B d91d84c5d86c838cc659c45e28cf3c8a9ec9a02f5854a1826680806dae41cdd00ea49a6d2c04f2019b3400c6e267c6ca6f3b28e43b244569dca174ff7f2c59d6 SHA512 ac9ea5a2d1c761aaafbdc335259e29c128127b8d069ec5b206067935180490aa95e93c7e13de57f7f54ce4ba4f34a822face22b4a028f60185edb380e5cd4787
 DIST tensorflow-python-license.rst.txt 45132 BLAKE2B 770ddd87adc745a021daac01a284131825a33f176002a28b20a2251246183bf7ab4686305620678285e4a216a37f01b3ad953fe7864fa79611cccde156d69ab6 SHA512 2d5a3783ec340f24797e89c78007ee95ab753eb5ee8198c739544f9e1159895f7abe816d6b3c66620f8d7b64641d2a7f44ff0a5b4f2f941928f4733157c7995e

diff --git a/sci-libs/tensorflow/files/tensorflow-1.14_rc0-0001-systemlibs-unbundle-enum34.patch b/sci-libs/tensorflow/files/tensorflow-1.14.0-0001-systemlibs-unbundle-enum34.patch
similarity index 96%
rename from sci-libs/tensorflow/files/tensorflow-1.14_rc0-0001-systemlibs-unbundle-enum34.patch
rename to sci-libs/tensorflow/files/tensorflow-1.14.0-0001-systemlibs-unbundle-enum34.patch
index b34eefd2665..a220bca57d6 100644
--- a/sci-libs/tensorflow/files/tensorflow-1.14_rc0-0001-systemlibs-unbundle-enum34.patch
+++ b/sci-libs/tensorflow/files/tensorflow-1.14.0-0001-systemlibs-unbundle-enum34.patch
@@ -1,4 +1,4 @@
-From 6339084d186e518136e13b761220423d6b5c849a Mon Sep 17 00:00:00 2001
+From 2956ac52f8495e08d5328f672c1366c52c9f8cfe Mon Sep 17 00:00:00 2001
 From: Jason Zaman <jason@perfinion.com>
 Date: Wed, 29 May 2019 19:27:11 +0800
 Subject: [PATCH] systemlibs: unbundle enum34

diff --git a/sci-libs/tensorflow/tensorflow-1.14.0_rc0.ebuild b/sci-libs/tensorflow/tensorflow-1.14.0.ebuild
similarity index 97%
rename from sci-libs/tensorflow/tensorflow-1.14.0_rc0.ebuild
rename to sci-libs/tensorflow/tensorflow-1.14.0.ebuild
index 3403ae52fdb..93428020020 100644
--- a/sci-libs/tensorflow/tensorflow-1.14.0_rc0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-1.14.0.ebuild
@@ -65,7 +65,6 @@ RDEPEND="
 	>=net-libs/grpc-1.16.0
 	net-misc/curl
 	sys-libs/zlib
-	dev-python/wrapt
 	>=sys-apps/hwloc-2
 	cuda? (
 		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
@@ -84,6 +83,7 @@ RDEPEND="
 		dev-python/six[${PYTHON_USEDEP}]
 		dev-python/termcolor[${PYTHON_USEDEP}]
 		dev-python/grpcio[${PYTHON_USEDEP}]
+		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
 		>=net-libs/google-cloud-cpp-0.9.0
 		>=sci-libs/keras-applications-1.0.6[${PYTHON_USEDEP}]
 		>=sci-libs/keras-preprocessing-1.0.5[${PYTHON_USEDEP}]
@@ -102,6 +102,11 @@ BDEPEND="
 	dev-python/mock
 	dev-lang/swig
 	dev-python/cython
+	|| (
+		=dev-util/bazel-0.24*
+		=dev-util/bazel-0.26*
+		=dev-util/bazel-0.27*
+	)
 	cuda? (
 		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
 	)
@@ -114,7 +119,7 @@ REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
 S="${WORKDIR}/${MY_P}"
 
 PATCHES=(
-	"${FILESDIR}/tensorflow-1.14_rc0-0001-systemlibs-unbundle-enum34.patch"
+	"${FILESDIR}/tensorflow-1.14.0-0001-systemlibs-unbundle-enum34.patch"
 )
 DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
 CHECKREQS_MEMORY="5G"
@@ -242,6 +247,7 @@ src_configure() {
 
 		echo 'build --config=noaws --config=nohdfs --config=noignite --config=nokafka' >> .bazelrc || die
 		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+		echo 'build --incompatible_no_support_tools_in_action_inputs=false' >> .bazelrc || die
 	}
 	if use python; then
 		python_foreach_impl run_in_build_dir do_configure



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2019-08-03 12:04 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2019-08-03 12:04 UTC
  To: gentoo-commits

commit:     b684f751b6ad2029191c65b50ce0eacc4de001a3
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Fri Aug  2 14:21:01 2019 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Sat Aug  3 11:55:38 2019 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=b684f751

sci-libs/tensorflow-1.14.0-r1: Fix build issues

- Header paths were mangled
- Jsoncpp-1.9 build error
- Newer GRPC needed a bumped google-cloud-cpp dep
- Build failed with python2 if future was not installed
- Add tensorflow_cc pkgconfig file

Closes: https://bugs.gentoo.org/688594
Closes: https://bugs.gentoo.org/689282
Closes: https://bugs.gentoo.org/690212
Package-Manager: Portage-2.3.69, Repoman-2.3.16
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 ...ll_headers-fix-paths-of-generated-headers.patch | 38 ++++++++++++++++++
 ...jsoncpp-update-header-symlinks-for-jsoncp.patch | 27 +++++++++++++
 ...g-generate-tensorflow_cc-pkg-config-entry.patch | 46 ++++++++++++++++++++++
 ...source-builtins-does-not-exist-in-python2.patch | 25 ++++++++++++
 ...w-1.14.0.ebuild => tensorflow-1.14.0-r1.ebuild} | 16 +++++---
 5 files changed, 146 insertions(+), 6 deletions(-)

diff --git a/sci-libs/tensorflow/files/tensorflow-1.14.0-0002-install_headers-fix-paths-of-generated-headers.patch b/sci-libs/tensorflow/files/tensorflow-1.14.0-0002-install_headers-fix-paths-of-generated-headers.patch
new file mode 100644
index 00000000000..566f2874376
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-1.14.0-0002-install_headers-fix-paths-of-generated-headers.patch
@@ -0,0 +1,38 @@
+From 5176e1feb22fac2f9d4ca9d8ad261ef6893d3c76 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 29 Jul 2019 01:17:06 +0800
+Subject: [PATCH 2/3] install_headers: fix paths of generated headers
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ tensorflow/BUILD | 7 +++++--
+ 1 file changed, 5 insertions(+), 2 deletions(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index a04ddf9f8a..1e0cc9207b 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -703,8 +703,8 @@ genrule(
+     mkdir $@
+     for f in $(SRCS); do
+       d="$${f%/*}"
+-      d="$${d#bazel-out*genfiles/}"
+-      d="$${d#*external/eigen_archive/}"
++      d="$${d#bazel-out/*/genfiles/}"
++      d="$${d#bazel-out/*/bin/}"
+ 
+       if [[ $${d} == *local_config_* ]]; then
+         continue
+@@ -716,6 +716,9 @@ genrule(
+         if [[ $${TF_SYSTEM_LIBS:-} == *$${extname}* ]]; then
+           continue
+         fi
++
++        d="$${d#*external/farmhash_archive/src}"
++        d="$${d#*external/$${extname}/}"
+       fi
+ 
+       mkdir -p "$@/$${d}"
+-- 
+2.21.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-1.14.0-0003-systemlibs-jsoncpp-update-header-symlinks-for-jsoncp.patch b/sci-libs/tensorflow/files/tensorflow-1.14.0-0003-systemlibs-jsoncpp-update-header-symlinks-for-jsoncp.patch
new file mode 100644
index 00000000000..3ccdf493373
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-1.14.0-0003-systemlibs-jsoncpp-update-header-symlinks-for-jsoncp.patch
@@ -0,0 +1,27 @@
+From 22275f7dbfa2d483ed4778427b137e83144bac3d Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Fri, 2 Aug 2019 20:06:33 +0800
+Subject: [PATCH 3/3] systemlibs: jsoncpp: update header symlinks for jsoncpp
+ 1.9
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ third_party/systemlibs/jsoncpp.BUILD | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/third_party/systemlibs/jsoncpp.BUILD b/third_party/systemlibs/jsoncpp.BUILD
+index 526fd0c418..7d54f9289b 100644
+--- a/third_party/systemlibs/jsoncpp.BUILD
++++ b/third_party/systemlibs/jsoncpp.BUILD
+@@ -6,6 +6,8 @@ filegroup(
+ )
+ 
+ HEADERS = [
++    "include/json/allocator.h",
++    "include/json/assertions.h",
+     "include/json/autolink.h",
+     "include/json/config.h",
+     "include/json/features.h",
+-- 
+2.21.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-1.14.0-0004-pkgconfig-generate-tensorflow_cc-pkg-config-entry.patch b/sci-libs/tensorflow/files/tensorflow-1.14.0-0004-pkgconfig-generate-tensorflow_cc-pkg-config-entry.patch
new file mode 100644
index 00000000000..5e65c413e77
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-1.14.0-0004-pkgconfig-generate-tensorflow_cc-pkg-config-entry.patch
@@ -0,0 +1,46 @@
+From a8144189b257d515f1eb72a4b50098e9f4abd4ea Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Fri, 2 Aug 2019 22:21:25 +0800
+Subject: [PATCH 4/5] pkgconfig: generate tensorflow_cc pkg-config entry
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ tensorflow/c/generate-pc.sh | 18 ++++++++++++++++--
+ 1 file changed, 16 insertions(+), 2 deletions(-)
+
+diff --git a/tensorflow/c/generate-pc.sh b/tensorflow/c/generate-pc.sh
+index 7184ad68fb..a4d51a1b3b 100755
+--- a/tensorflow/c/generate-pc.sh
++++ b/tensorflow/c/generate-pc.sh
+@@ -63,12 +63,26 @@ cat << EOF > tensorflow.pc
+ prefix=${TF_PREFIX}
+ exec_prefix=\${prefix}
+ libdir=\${exec_prefix}/${LIBDIR}
+-includedir=\${prefix}/include
++includedir=\${prefix}/include/tensorflow
+ 
+ Name: TensorFlow
+ Version: ${TF_VERSION}
+ Description: Library for computation using data flow graphs for scalable machine learning
+ Requires:
+-Libs: -L\${libdir} -ltensorflow
++Libs: -L\${libdir} -ltensorflow -ltensorflow_framework
++Cflags: -I\${includedir}
++EOF
++
++cat << EOF > tensorflow_cc.pc
++prefix=${TF_PREFIX}
++exec_prefix=\${prefix}
++libdir=\${exec_prefix}/${LIBDIR}
++includedir=\${prefix}/include/tensorflow
++
++Name: TensorFlow
++Version: ${TF_VERSION}
++Description: Library for computation using data flow graphs for scalable machine learning
++Requires:
++Libs: -L\${libdir} -ltensorflow_cc -ltensorflow_framework
+ Cflags: -I\${includedir}
+ EOF
+-- 
+2.21.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-1.14.0-0005-gen_git_source-builtins-does-not-exist-in-python2.patch b/sci-libs/tensorflow/files/tensorflow-1.14.0-0005-gen_git_source-builtins-does-not-exist-in-python2.patch
new file mode 100644
index 00000000000..fc2e1fe92cb
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-1.14.0-0005-gen_git_source-builtins-does-not-exist-in-python2.patch
@@ -0,0 +1,25 @@
+From bdbe7a572ed248bf8e53df200ffa5e66bf77b16f Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Fri, 2 Aug 2019 22:22:53 +0800
+Subject: [PATCH 5/5] gen_git_source: builtins does not exist in python2
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ tensorflow/tools/git/gen_git_source.py | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/tensorflow/tools/git/gen_git_source.py b/tensorflow/tools/git/gen_git_source.py
+index 6cefd3900b..4d52c1fccf 100755
+--- a/tensorflow/tools/git/gen_git_source.py
++++ b/tensorflow/tools/git/gen_git_source.py
+@@ -26,7 +26,6 @@ NOTE: this script is only used in opensource.
+ from __future__ import absolute_import
+ from __future__ import division
+ from __future__ import print_function
+-from builtins import bytes  # pylint: disable=redefined-builtin
+ import argparse
+ import json
+ import os
+-- 
+2.21.0
+

diff --git a/sci-libs/tensorflow/tensorflow-1.14.0.ebuild b/sci-libs/tensorflow/tensorflow-1.14.0-r1.ebuild
similarity index 94%
rename from sci-libs/tensorflow/tensorflow-1.14.0.ebuild
rename to sci-libs/tensorflow/tensorflow-1.14.0-r1.ebuild
index 93428020020..0d050e7f370 100644
--- a/sci-libs/tensorflow/tensorflow-1.14.0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-1.14.0-r1.ebuild
@@ -53,7 +53,7 @@ RDEPEND="
 	dev-db/lmdb
 	dev-db/sqlite
 	dev-libs/icu
-	>=dev-libs/jsoncpp-1.8.4
+	>=dev-libs/jsoncpp-1.9
 	dev-libs/libpcre
 	dev-libs/nsync
 	dev-libs/openssl:0=
@@ -62,7 +62,7 @@ RDEPEND="
 	media-libs/giflib
 	media-libs/libjpeg-turbo
 	media-libs/libpng:0
-	>=net-libs/grpc-1.16.0
+	>=net-libs/grpc-1.22.0
 	net-misc/curl
 	sys-libs/zlib
 	>=sys-apps/hwloc-2
@@ -82,9 +82,9 @@ RDEPEND="
 		>=dev-python/protobuf-python-3.6.0[${PYTHON_USEDEP}]
 		dev-python/six[${PYTHON_USEDEP}]
 		dev-python/termcolor[${PYTHON_USEDEP}]
-		dev-python/grpcio[${PYTHON_USEDEP}]
+		>=dev-python/grpcio-1.22.0[${PYTHON_USEDEP}]
 		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
-		>=net-libs/google-cloud-cpp-0.9.0
+		>=net-libs/google-cloud-cpp-0.10.0
 		>=sci-libs/keras-applications-1.0.6[${PYTHON_USEDEP}]
 		>=sci-libs/keras-preprocessing-1.0.5[${PYTHON_USEDEP}]
 		>=sci-visualization/tensorboard-1.13.0[${PYTHON_USEDEP}]
@@ -112,7 +112,7 @@ BDEPEND="
 	)
 	!python? ( dev-lang/python )
 	python? (
-		dev-python/grpcio-tools
+		>=dev-python/grpcio-tools-1.22.0
 	)"
 REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
 
@@ -120,6 +120,10 @@ S="${WORKDIR}/${MY_P}"
 
 PATCHES=(
 	"${FILESDIR}/tensorflow-1.14.0-0001-systemlibs-unbundle-enum34.patch"
+	"${FILESDIR}/tensorflow-1.14.0-0002-install_headers-fix-paths-of-generated-headers.patch"
+	"${FILESDIR}/tensorflow-1.14.0-0003-systemlibs-jsoncpp-update-header-symlinks-for-jsoncp.patch"
+	"${FILESDIR}/tensorflow-1.14.0-0004-pkgconfig-generate-tensorflow_cc-pkg-config-entry.patch"
+	"${FILESDIR}/tensorflow-1.14.0-0005-gen_git_source-builtins-does-not-exist-in-python2.patch"
 )
 DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
 CHECKREQS_MEMORY="5G"
@@ -328,7 +332,7 @@ src_install() {
 	# Generate pkg-config file
 	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
 	insinto /usr/$(get_libdir)/pkgconfig
-	doins ${PN}.pc
+	doins ${PN}.pc ${PN}_cc.pc
 
 	for l in libtensorflow{,_framework,_cc}.so; do
 		dolib.so bazel-bin/tensorflow/${l}
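
As a quick consumer-side sketch (assuming the package above is merged and pkg-config is installed), the new tensorflow_cc.pc entry generated by generate-pc.sh can be queried like any other:

    # query the .pc files installed by doins above (illustrative session)
    pkg-config --exists tensorflow tensorflow_cc && echo "both entries found"
    pkg-config --cflags --libs tensorflow_cc
    # roughly: -I/usr/include/tensorflow -L/usr/lib64 -ltensorflow_cc -ltensorflow_framework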



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2019-12-02 14:50 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2019-12-02 14:50 UTC (permalink / raw
  To: gentoo-commits

commit:     9b559b8410a1de974fab323fb82fcc86024ef944
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Fri Nov 29 09:56:29 2019 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Mon Dec  2 14:49:39 2019 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=9b559b84

sci-libs/tensorflow: bump 2.1.0_rc0

Package-Manager: Portage-2.3.79, Repoman-2.3.16
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 sci-libs/tensorflow/Manifest                       |   6 +
 .../tensorflow-2.1.0-cuda_10.2_support_bin2c.patch |  12 +
 .../files/tensorflow-2.1.0-external_libs.patch     |  26 ++
 sci-libs/tensorflow/tensorflow-2.1.0_rc0.ebuild    | 358 +++++++++++++++++++++
 4 files changed, 402 insertions(+)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 9645f0bf601..8d60bc809a9 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -8,11 +8,13 @@ DIST bazel-skylib.0.8.0.tar.gz 72941 BLAKE2B 1928989d3e5dc338fee974c5592a524adb4
 DIST bazel-toolchains-3f8c58fe530fedc446de04673bc1e32985887dea.tar.gz 416360 BLAKE2B 4d7960302cb70247f8166647b991d74aa0514d218c9de4c97fe267ecb789e9966cfd8e546fe11661c92034df7a8de72596c5c5ddea10d145d6783cdc5aba2f9b SHA512 9a810b0a70f81fe3b9a1f23b2d171bbe723f7a33374b2caf7ce398572965f21b9de00791122c32b36e68a572b1b8982be378730f1c9f22bc375ae5f5c1c32873
 DIST bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz 660374 BLAKE2B 549292aa3ad8a6e01c29391a123f3cbc84d1f714ff9c68bfc6c8df07fa0ba3b19c22e61627f227a8a484f6d7d6c59935fd7560c447559ff5e774ddf1437eefe1 SHA512 83531a2646a9f63557eff6fefaf551ee7e9f0ccdb31374d7a17c35b9f9d844b9b405597972fcd31b2b68e9da312511bbfd9c8fbb75222f5924e1d2500cd1c2a6
 DIST bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz 505681 BLAKE2B f16225eb11859f487335bddca115c9c2a05731f4bfe4240609a47e17b706d9a25b6b363b61404b4d5df0471e4dd9b4b9befbf78dbcb1c7c11e3f590039ff92b2 SHA512 7723baf2c710adb063375a33fd64e3941fae7e98c824764c266d5554cc0b0cbb1c91a5c2a9058bb4c8766ec404ca556199ce22965a6de4baee09cb740b7671c9
+DIST bazel_skylib-0.9.0.tar.gz 84644 BLAKE2B 40b69a524e632ea00d34b9cf805b882ac4c549dd465ef37f28e54729ff1b614a7610a4895cede4e0c938edba5b19f781f3c0bfdb62bde628ee0af21d6b679546 SHA512 af4bcb641fed05d15f2d0ffe0e3abdfe9c7a5deb9c53064992d6605d224ded1648b1bdfa293e161028913f9f2c178c68cc38bc16028892ca5b0f52b4e645209d
 DIST bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz 464906 BLAKE2B 033d76b8081f4f987e64875ad5c8e7b8f894ec6be58c52ee02c4d31d4480fee02f3f432ea9c4630ad3f5d1163f820aff37f6493da797ec51b148b361ab3c8b25 SHA512 2cd841f4530503ed31fa6425cb749ef29f8a1071b5d55594644303233e58455783cb02402bc23d7104ef036745733087d43075a1fcdab2ac96cd1a9872a6ea4a
 DIST bazelbuild-rules_closure-cf1e44edb908e9616030cc83d085989b8e6cd6df.tar.gz 462798 BLAKE2B f461635702cd881828cf401c78fe0e64398c015d7f7824b369b0808d54224c633b6d8b699000e9e481ba094a93e01c33cc4360a541b2d520db10d3e9ef81007e SHA512 e85b13a50cbec60e4e574de9a3d3b646ca4bf045c55afcb41f2fd671202aac9e6d3e91c618f8923986ae142e615624718f3df0d1edb33a90e334f29667888d39
 DIST bazelbuild-rules_closure-dbb96841cc0a5fb2664c37822803b06dab20c7d1.tar.gz 435984 BLAKE2B 1ec325861e5e5292b2153119beff3b3be743b4833adf3571eab1a066fef06a61131ef0a761e6f8ba4bf978e7c1c073f197f48a507cd1259ee346d4209129211d SHA512 13991822624e5bdcc2424299acb1b9ce12880f46ebbf2065d7d19a66c77f1a6802862a7ea96334fc91a1c85cff516fd32a493320f0a4d98f19e9d33e5f887986
 DIST bazelbuild-rules_docker-v0.10.0.tar.gz 549649 BLAKE2B e7a537b21138a5c5d9ce360e46238f57c917d2dbf5dd17887607402227cbe7c5d68aead9db0ecdb74c09eed9dac26eb86e004a9020c251152d15beb48be0e0d7 SHA512 7802107f8520c88646728800637cce54dbcefc54aa4361288a5b46e403e41998bc23e16fbe94f23a2ca52e5c33fc255af65846963c2fd0139ca1195435ce1b03
 DIST bazelbuild-rules_swift.0.11.1.tar.gz 107453 BLAKE2B 8cd686b945b5c9de9c8bfcaca38cd7c513bb8b55b4584bc569463997e7c5abcac3be4a5a31ccf95a494d17f3078ec9b1052497dba228fa3159a0c0be2a83eb88 SHA512 3479d6d8ce9111a39b9ef2e014399abc904e432841570dd0380104d80e4d3a74b6ff5cc6d2b250bcd15b152efa29d08f9ed092f70fd8bafa6deaa6f847d7f626
+DIST bazelbuild-rules_swift.0.12.1.tar.gz 111674 BLAKE2B 7c116d6098aa0725034dce4c83628c31c85c36a6331b44636deb2efe0f689a5baee8081b33d65e3727a83bca865bcabedd6f8fa1fdb60e901a9b60840c52c018 SHA512 6136feb3dc0074f82500000cf5335f046f21ea3a1edcb4dcb22228c82b56ed6a334e4b03e4e26a4d25549581abcf7df22b60a2bccf823a51bf270051fe18d489
 DIST bazelbuild-rules_swift.0.9.0.tar.gz 105918 BLAKE2B 8f1bd8f5a468e3b37c614cf498a6cd8ceca55b255d6f096ebf88cd77e95148eab3fb5a128ed93b40f6d1ee8988b8dfd300fdf7061fc4ed4f1fb4ab25446ebe58 SHA512 68e985fa30b865317ccf59473cb52873d65159b866a54e35b3fbf83c135f3c750752663e962decbc8ebcd17b5784867d4d4767dd0b706f1182d4715291d8268b
 DIST cub-1.8.0.zip 602396 BLAKE2B a5e302a52e04f85ae8e1836c59e799532a8b961a8d10c08fe1241c9045f88e31f1ebda6d26124b85c6df40968e1c7b3985a39961bf7614535aafcab2da1a576a SHA512 6167c5be94989f88e05a07508cf69cf2c6b22a5ac733804043daa46bd86a44a15a758b1ffb811bab28b82039d027516ed85b3d9f7677a740a7e00ec9c2f81aed
 DIST double-conversion-3992066a95b823efc8ccc1baf82a1cfc73f6e9b8.zip 7071029 BLAKE2B 722c190c941b169a3292ce2141c59b8eb1bcc5edd2ed09669592dddb9354e4c6c094dafe02e5a154f215aec776f9f7cebae36b9b0317b10b707282b1ca9449a5 SHA512 1db0d4805c80e9a08d9a581051d8708ddbfe1d74fd820dccde2c2405c45c06a861bd5b535b25d191cd01064e268f3fe71e58bef16fd39471812aea6c2f97da43
@@ -20,6 +22,7 @@ DIST eigen-049af2f56331.tar.gz 2485848 BLAKE2B 9d8a91782c9d40b7a6df101d2f43df25b
 DIST eigen-8071cda5714d.tar.gz 2486211 BLAKE2B d5c2bc19d66b4e8508e260ba34a7ebf64c605d0eba3363bf853baaf1329ded80511ee36d54db3e8921eb62a0eab3c3ad2a2d4eac39b5108c2415b20812d998c0 SHA512 364691a5db9d2bd4285e5d5d9b9b2326ffd2138868c33bc2b98975da2aeee03bec7b40654ecc20e0ecc34b2a6d45257c7b9f0055b7310c590cac4c951c2aa635
 DIST eigen-9f48e814419e.tar.gz 2430237 BLAKE2B 2d9df1a23ae049e57366fa6838ee8d4b3cecf2d859ec5ffa07bf18878976a2bd99d7dd94bb08f484a8941b5cf8ac091f837a533f081232c70a45ee0583143ba3 SHA512 ee6d8773d910dbf6798cb849ef7611f0f7aaef9ed68216e91c65bd996b8a92b6c107454f6d8599d83bfa6dede7144c83924de8479a9138c7b456b086cd527ebf
 DIST eigen-a0d250e79c79.tar.gz 2468318 BLAKE2B 8799adc1a13612820251cd22a7ddc347ee4711d583561a5df9735bd22ab727c0d8e7fdc894cd7e83b3b972d5adcb86f033bc089e1a4de5fe4e6257f78df88957 SHA512 864646a0b3fd8287fbf6943f4cc7f692a883e6164add21c97e98372641b32e5ceffadc825178b9d11e9504377b837d0e54a264b7a53b28e4f9fc2a23c616d2b3
+DIST eigen-afc120bc03bd.tar.gz 2546157 BLAKE2B 72e051d77cd22c26a4203cf244e87b1700b4dce664d7ee5f80d0dd86eb5557c93e57c50664f1d79b13796a77c5ccdf72203ec66e9376d383b5a3b056326c7b61 SHA512 61f57a22d2a7947c14cfde87946dc41528df696884502a13000ee5c9b964672a5a8d8c10f505f28ed7a2d39deb820c6818d191dc3ce13215b8b9775b33da2ed8
 DIST farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz 467122 BLAKE2B 8b9dd426f4b9f732df6c8c09d868d1b1cc006c395b1525c46ea91c75318b723358c8e64bb0d86a73aace2032eded93f0d80cc7c2c77fddd6a39e3402ab4f2cb7 SHA512 7f5110514a8cdc7ce48d33fd94ad67d96a24e42e11b203e4d0986f4190545f051b2a63f8e09f2ac38c3aa4a5c62d9a0e02cd8ce02045037381fb62a99f0dcd00
 DIST gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip 935628 BLAKE2B 09bd1dd1e367b1e5c4fe9a9525449c09460f19168bc03c57ad563aa27ec74a18f08b620a3dab6da4fa1a1448cdc7ce8bc8a089becc033b53fff4cc07593315ce SHA512 bd42a364f718f3b077cd9c840d626bfc1fd92ace98ce43329d8fa54700c9d28cb47568b6567e9b081b340af7be621c44be1ce06d4b8dccc4fb5d82c35b12f951
 DIST gemmlowp-38ebac7b059e84692f53e5938f97a9943c120d98.zip 916632 BLAKE2B 0bde9faf0e185fc69999173eb0b5a2e106d252ee1bc6caf38b1adc27bde96e4e2b52a3d723c8b1af922386c0e933bfcc674be974cee059c1d9b16ff6ac631c3e SHA512 eb141182dff058048c559723e6aa7127244dcc6a5a0bfd1250bbb262f34378dccf15b1b930cf46dcb6ac22ac16e3fa2781cca63cd93865506a36ab7e463aa537
@@ -27,11 +30,13 @@ DIST highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz 138574 BLAKE2B
 DIST kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz 44744 BLAKE2B aeea5d23c122417ddb70a44354890242c4fb61c456fba9af79054293a1355939af51a681760ac27be55423f888649fd2826e634422c7e9cb3749a9d7f50ef990 SHA512 30f4fcd351bbcb950a27e2a77ca100ccdfd37deb17a05e7e31c3e8f1c56d7c626ab81bbd7497484e6b512e41f7b1bed714722a194b2218c669b297911101df7b
 DIST llvm-7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz 50144723 BLAKE2B b744756c9782e5506fd6628531409e305bd9ce9a79f1776c4fe674cec5b01a86ab1793dc602f704d295587c3d769d622402a4fe07c0d66269c7f610178693bce SHA512 43f185c8dd38ae7cb048bb9c98fb94e1f43ce66764b43a7141a06a1a0b011389fa139f34b7eaee72c39b8f44aeeff3e7a2189c11d10f8f8a93e9792a1acf2b5f
 DIST llvm-b7d166cebcf619a3691eed3f994384aab3d80fa6.tar.gz 49997477 BLAKE2B 66cbd71a4ad65768fb39fd622efd54a7821b83eb105871e82e0b99074960cb4ebd8197ef984b0be29d5178a7ce3c0d4839d2d611c96fe1507227f7f28fd7e156 SHA512 79c6d4986cedfc132f085d2794f295fdbefa772cb53d3870963d4851ea15ce8e2863aa50c903930abaff53d818ad97acfe8acd1c923ccbe26e3a99ac2397ae9f
+DIST llvm-ecc999101aadc8dc7d4af9fd88be10fe42674aa0.tar.gz 115788302 BLAKE2B ea18d6cb3cc8311a78e543996d88831e1ecf1dec58b7c890a80cd8fce86e1c52244b9cfc0d7ce0867480465c2d914555c16cf003bfdcf90056d63e8f4e0ea0c0 SHA512 5422762ffe1022ad4588bcf2bdce1a041c37b5edffed70fcefdd911b2dbf8b52cba9ec57670d48d7b69be6b71e354d333d6eaa5ac5260df56ed8ab18fd98fe88
 DIST nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz 107044 BLAKE2B fd018b1d40977add357e98382f41e3fa6456bdc9a181e2d6cc7a3bc7fb98d242b3216b5ed0c40323bbcc006ab5221b49819ebc1dccd0c8dd700d9504f085eed1 SHA512 e66f323abb68feeec41927cc23e3a85d82fd1b7105a6232626edba95acb9eb22b3fb8b28d1a7adb536c47c936f68271fe5d8f66d3059e3128608ad826dd7622e
 DIST nvidia-nccl-f93fe9bfd94884cec2ba711897222e0df5569a53.tar.gz 95617 BLAKE2B 023788db444b1a50f01ab16f126c920f530a6ff5bf2a9327626db45f985b4093c1024d16d43cdee9f0b3ee5c8b7d104f2a6b1dab0460c6b1998b42a0354f6f27 SHA512 d6b4dd4d6c7c9ca2dcf0699055c9193d4b46c0c5155e0108db38094577c92f954f604b2e83ca320873288e0010eaa3c5812f05f7fafe5112bebbca78e56cb279
 DIST oourafft-20061228.tgz 72213 BLAKE2B 4a3ac2b4c0bc3275b5743df59241e1cdbd0200371c153ddf54ef6c7c4ec523ee6560547e2d5ef9f3200037c0635bf41c18991ac35f271b1e600d0dbd65d1a9a7 SHA512 f1ceac00cb7b9eb8f625eee7f1f5eea8af363343589a344226628d68baf668c176e6c23b7f904c4e682330352eaa0cd5d00731340d208e94c9657b8f85ae2240
 DIST oourafft2d-20061228.tgz 54434 BLAKE2B 382dd5787155b877a2ad1d624596afb5dbbe146a7aedf044d7f016949deedac871a512a030b5e6ac9ea5310e216788ffb99af1dd9d2a0de3f1ad72d9e5e7a3db SHA512 af993f68e8e1eb3cb927a51e86da8f74cfafc912a7cd055515e50fe543dd19ab5a6f7b1c2be4a55d6f4a0e5d766ead34c3be4c5705be6353f78cb2a55bd5cf16
 DIST pybind11-v2.3.0.tar.gz 564520 BLAKE2B a7c1485aa8f63a00eccd3103cf9f531a080b049be02e6182b30503ae6dd6c55fc517684ed9d92aaec244efd5f77336c68d1ea546833f7f0ae2fd7b090c19a02d SHA512 04542fe9dca2ffdb86c15698a4bbc041446a9f11970092e89a482ae0ff64c932f62fff8f437b98bdaa006c1fccabddd79f0c7e376f27daed2150b1d2f988e8aa
+DIST re2-506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz 316237 BLAKE2B 8a975bc3e78b690f70944651c7022c6d946f40b2eb5fe01b2e203141f94b890cf1ad5c72af8e7c6ff3c5242d42af5a8657ed69e3c74d9ac081beb15de6334c0c SHA512 e3e00e92700d08bc55ba0e96148ddf1e00de2d329c1fdac08691ff15976937829f66dbc84af886a4b749ad66ba4ab208d66811e97e33bb9c2d7359735406c450
 DIST rules_docker-a9bb1dab84cdf46e34d1b34b53a17bda129b5eba.tar.gz 446437 BLAKE2B 070e1028625c74e6fbd092a3f8c6cf6de3b98d8999fdd69527674cfc55f00bd7638f7cb1eba41d496d1de3307b81b9dfd832d7d4bc65f9d98cd53714cb24bb02 SHA512 e43c013027a7235e85c263d8cfc40f9af77f54d054665116bb8641ed7f60d077b9107aa97da5b7e102077b68be7d2cbe401091016bd46f8828ae159659ce9608
 DIST rules_docker-b8ff6a85ec359db3fd5657accd3e524daf12016d.tar.gz 460121 BLAKE2B bca31bbdd9069647fa9560002e63a404e2bd4d4ebd19d90e28d08f797f8f63157cc127dab26b996bb976e50f099928a91c1d6343427cb85048c6ca2b47c0c7e2 SHA512 461a663189e5995e31ee8175ee260e1ad06b00ee8a7548b9d3a946a2de693311031ec6eba89e3bad527f5d8f76ed25626ef4ea05a8d6e7579932dc1be3b0a6ba
 DIST tensorflow-1.13.1.tar.gz 31614228 BLAKE2B 3e78309744747afd5a0929eebccb414efe51320f9291424ef6e3cb848f265aaac5ff16a7064c7aac8094f06b1edb61aa9fe22a55c28c23cfc4e2a6f640f4f12d SHA512 1e5c020a82b0b502c9c6fc9439905e19e8f1fb07e32740a5e2c70f4a3565c8c87053ccdcad360e95550dad648154e399ca8e352fb97a1788acfec3b87bf323bb
@@ -40,5 +45,6 @@ DIST tensorflow-1.14.0.tar.gz 41335204 BLAKE2B d91d84c5d86c838cc659c45e28cf3c8a9
 DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
 DIST tensorflow-1.15.0_rc0.tar.gz 46177850 BLAKE2B a4cb339018d04da26993400e99bacbe8c88dd68e17158b3effac125ee90b73d441084d530359912c40d1981c7688511ad41117067428233e9310c1fc8617f1d0 SHA512 f9ba5f2c952b7d8be0f3e2b78336513efa07d5323bc28217335391538577ba2b91687cf92f759d4ec03b02dd1c2961de4b54df205e7952c706f5b632029b7c7d
 DIST tensorflow-2.0.0.tar.gz 46176997 BLAKE2B 761e91a774f61b50459240a83fe52f723a76c2cc3eaf7c371684849cb7365b94fb3743ab8de9adffe64bef0f70da2bb0b7e6426a3f402e436eff841d3edefe8d SHA512 c3a2cb9673d2ade8e83961f7d944165123d0e4d94e6ca4d8fdee4f02f536893b879bbbf78d3ad5e557467b58f97f7194f2d9cfc64d7d05540969be732070eecc
+DIST tensorflow-2.1.0_rc0.tar.gz 41625131 BLAKE2B bc286f2f8d76a38973706acf3d839a4f54843b2f37ba5485eb149beacd55c7ac13660592acbc2413f8e60242c4a34b90cea8f1d9708d3f086e7cae73cc338832 SHA512 1481af1ed43ff03f6a0eeaad9896e2ec021d763bc69bf03e33353d6188074fdfaa1b6c3bba1d4e2496207f468e6a555fe2c962ae15eddd30ef872a3d76e19911
 DIST tensorflow-patches-1.14.0-r2.tar.bz2 7213 BLAKE2B 51719fc02049d564b7e5c00a76375af9a7e8dfc73753ba5f0d53163dc1fa00d23bb7ed03553a0bce68b8b90cd5bfe21d29e7a57a827db2c68ceef65f3cb0e925 SHA512 35e1a410060cdde99df148bfd06f042b91e021f7a692ae9c0100c67555de553bc248d73db762ed0239117296fb1147e2b67cdf0dd3dc94dee6fe9804c08a7360
 DIST tensorflow-python-license.rst.txt 45132 BLAKE2B 770ddd87adc745a021daac01a284131825a33f176002a28b20a2251246183bf7ab4686305620678285e4a216a37f01b3ad953fe7864fa79611cccde156d69ab6 SHA512 2d5a3783ec340f24797e89c78007ee95ab753eb5ee8198c739544f9e1159895f7abe816d6b3c66620f8d7b64641d2a7f44ff0a5b4f2f941928f4733157c7995e

diff --git a/sci-libs/tensorflow/files/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch b/sci-libs/tensorflow/files/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch
new file mode 100644
index 00000000000..4cc2f608d5d
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch
@@ -0,0 +1,12 @@
+diff --git a/third_party/nccl/build_defs.bzl.tpl b/third_party/nccl/build_defs.bzl.tpl
+index 5719139855..5f5c3a1008 100644
+--- a/third_party/nccl/build_defs.bzl.tpl
++++ b/third_party/nccl/build_defs.bzl.tpl
+@@ -113,7 +113,6 @@ def _device_link_impl(ctx):
+             "--cmdline=--compile-only",
+             "--link",
+             "--compress-all",
+-            "--bin2c-path=%s" % bin2c.dirname,
+             "--create=%s" % tmp_fatbin.path,
+             "--embedded-fatbin=%s" % fatbin_h.path,
+         ] + images,

diff --git a/sci-libs/tensorflow/files/tensorflow-2.1.0-external_libs.patch b/sci-libs/tensorflow/files/tensorflow-2.1.0-external_libs.patch
new file mode 100644
index 00000000000..194cdc16651
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.1.0-external_libs.patch
@@ -0,0 +1,26 @@
+diff --git a/WORKSPACE b/WORKSPACE
+index babb14b509..bcdd4f46e2 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -67,7 +67,7 @@ http_archive(
+ http_archive(
+     name = "bazel_skylib",
+     sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0",
+-    urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel-skylib.0.9.0.tar.gz"],
++    urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz"],
+ )  # https://github.com/bazelbuild/bazel-skylib/releases
+ http_archive(
+     name = "com_github_apple_swift_swift_protobuf",
+diff --git a/third_party/systemlibs/syslibs_configure.bzl b/third_party/systemlibs/syslibs_configure.bzl
+index 8619cddfdd..4b57df0cbe 100644
+--- a/third_party/systemlibs/syslibs_configure.bzl
++++ b/third_party/systemlibs/syslibs_configure.bzl
+@@ -27,7 +27,7 @@ VALID_LIBS = [
+     "grpc",
+     "hwloc",
+     "icu",
+-    "jpeg",
++    "libjpeg_turbo",
+     "jsoncpp_git",
+     "keras_applications_archive",
+     "lmdb",

diff --git a/sci-libs/tensorflow/tensorflow-2.1.0_rc0.ebuild b/sci-libs/tensorflow/tensorflow-2.1.0_rc0.ebuild
new file mode 100644
index 00000000000..86182bded16
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-2.1.0_rc0.ebuild
@@ -0,0 +1,358 @@
+# Copyright 1999-2019 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
+DISTUTILS_OPTIONAL=1
+PYTHON_COMPAT=( python{3_5,3_6,3_7} )
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+
+inherit bazel check-reqs cuda distutils-r1 flag-o-matic toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda mpi +python xla"
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+	IUSE+=" cpu_flags_x86_$i"
+done
+
+# distfiles that bazel uses for the workspace; they will be copied to bazel-distdir
+bazel_external_uris="
+	https://storage.googleapis.com/mirror.tensorflow.org/www.kurims.kyoto-u.ac.jp/~ooura/fft2d.tgz -> oourafft2d-20061228.tgz
+	https://bitbucket.org/eigen/eigen/get/afc120bc03bd.tar.gz -> eigen-afc120bc03bd.tar.gz
+	https://github.com/abseil/abseil-cpp/archive/43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz -> abseil-cpp-43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz
+	https://github.com/bazelbuild/bazel-skylib/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz
+	https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz
+	https://github.com/bazelbuild/bazel-toolchains/archive/92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz -> bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz
+	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
+	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
+	https://github.com/bazelbuild/rules_swift/releases/download/0.12.1/rules_swift.0.12.1.tar.gz -> bazelbuild-rules_swift.0.12.1.tar.gz
+	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
+	https://github.com/google/gemmlowp/archive/12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip -> gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip
+	https://github.com/google/highwayhash/archive/fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz -> highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz
+	https://github.com/google/re2/archive/506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz -> re2-506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz
+	https://github.com/mborgerding/kissfft/archive/36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz -> kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz
+	https://github.com/pybind/pybind11/archive/v2.3.0.tar.gz -> pybind11-v2.3.0.tar.gz
+	https://github.com/llvm/llvm-project/archive/ecc999101aadc8dc7d4af9fd88be10fe42674aa0.tar.gz -> llvm-ecc999101aadc8dc7d4af9fd88be10fe42674aa0.tar.gz
+	cuda? (
+		https://github.com/nvidia/nccl/archive/0ceaec9cee96ae7658aa45686853286651f36384.tar.gz -> nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz
+		https://github.com/NVlabs/cub/archive/1.8.0.zip -> cub-1.8.0.zip
+	)
+	python? (
+		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz
+		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
+		https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
+	)"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+		${bazel_external_uris}"
+
+RDEPEND="
+	app-arch/snappy
+	dev-db/lmdb
+	dev-db/sqlite
+	dev-libs/icu
+	>=dev-libs/jsoncpp-1.9
+	dev-libs/libpcre
+	dev-libs/nsync
+	dev-libs/openssl:0=
+	>=dev-libs/protobuf-3.8.0:=
+	>=dev-libs/re2-0.2019.06.01
+	media-libs/giflib
+	media-libs/libjpeg-turbo
+	media-libs/libpng:0
+	>=net-libs/grpc-1.22.0
+	net-misc/curl
+	sys-libs/zlib
+	>=sys-apps/hwloc-2
+	cuda? (
+		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+		dev-libs/cudnn
+	)
+	mpi? ( virtual/mpi )
+	python? (
+		${PYTHON_DEPS}
+		>=dev-libs/flatbuffers-1.8.0
+		dev-python/absl-py[${PYTHON_USEDEP}]
+		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
+		dev-python/gast[${PYTHON_USEDEP}]
+		>=dev-python/numpy-1.16[${PYTHON_USEDEP}]
+		dev-python/google-pasta[${PYTHON_USEDEP}]
+		dev-python/opt-einsum[${PYTHON_USEDEP}]
+		>=dev-python/protobuf-python-3.8.0[${PYTHON_USEDEP}]
+		dev-python/six[${PYTHON_USEDEP}]
+		dev-python/termcolor[${PYTHON_USEDEP}]
+		>=dev-python/grpcio-1.22.0[${PYTHON_USEDEP}]
+		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
+		>=net-libs/google-cloud-cpp-0.10.0
+		>=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
+		>=sci-libs/keras-preprocessing-1.1.0[${PYTHON_USEDEP}]
+		>=sci-visualization/tensorboard-2.0.0[${PYTHON_USEDEP}]
+	)"
+DEPEND="${RDEPEND}
+	python? (
+		dev-python/mock
+		dev-python/setuptools
+	)"
+PDEPEND="python? (
+		>=sci-libs/tensorflow-estimator-2.0.0[${PYTHON_USEDEP}]
+	)"
+BDEPEND="
+	app-arch/unzip
+	>=dev-libs/protobuf-3.8.0
+	dev-java/java-config
+	dev-lang/swig
+	|| (
+		=dev-util/bazel-0.24*
+		=dev-util/bazel-0.26*
+		=dev-util/bazel-0.27*
+	)
+	cuda? (
+		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+	)
+	!python? ( dev-lang/python )
+	python? (
+		dev-python/cython
+		dev-python/mock
+		>=dev-python/grpcio-tools-1.22.0
+	)"
+REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+
+S="${WORKDIR}/${MY_P}"
+
+PATCHES=(
+	"${FILESDIR}/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
+	"${FILESDIR}/tensorflow-2.1.0-external_libs.patch"
+)
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+CHECKREQS_MEMORY="5G"
+CHECKREQS_DISK_BUILD="10G"
+
+get-cpu-flags() {
+	local i f=()
+	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
+	done
+	use cpu_flags_x86_fma3 && f+=( -mfma )
+	echo "${f[*]}"
+}
+
+pkg_setup() {
+	ewarn "TensorFlow 2.0 is a major release that contains some incompatibilities"
+	ewarn "with TensorFlow 1.x. For more information about migrating to TF2.0 see:"
+	ewarn "https://www.tensorflow.org/guide/migrate"
+
+	local num_pythons_enabled
+	num_pythons_enabled=0
+	count_impls(){
+		num_pythons_enabled=$((${num_pythons_enabled} + 1))
+	}
+	use python && python_foreach_impl count_impls
+
+	# 10G to build C/C++ libs, 6G per python impl
+	CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
+	check-reqs_pkg_setup
+}
+
+src_unpack() {
+	# Only unpack the main distfile
+	unpack "${P}.tar.gz"
+	bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+
+	append-flags $(get-cpu-flags)
+	bazel_setup_bazelrc
+
+	if ver_test "$(cuda_toolkit_version)" -ge "10.2"; then
+		eapply "${FILESDIR}/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch"
+	fi
+
+	default
+	use python && python_copy_sources
+
+	use cuda && cuda_add_sandbox
+}
+
+src_configure() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+
+	do_configure() {
+		export CC_OPT_FLAGS=" "
+		export TF_ENABLE_XLA=$(usex xla 1 0)
+		export TF_NEED_OPENCL_SYCL=0
+		export TF_NEED_OPENCL=0
+		export TF_NEED_COMPUTECPP=0
+		export TF_NEED_ROCM=0
+		export TF_NEED_MPI=$(usex mpi 1 0)
+		export TF_SET_ANDROID_WORKSPACE=0
+
+		if use python; then
+			python_export PYTHON_SITEDIR
+			export PYTHON_BIN_PATH="${PYTHON}"
+			export PYTHON_LIB_PATH="${PYTHON_SITEDIR}"
+		else
+			export PYTHON_BIN_PATH="$(which python)"
+			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
+		fi
+
+		export TF_NEED_CUDA=$(usex cuda 1 0)
+		export TF_DOWNLOAD_CLANG=0
+		export TF_CUDA_CLANG=0
+		export TF_NEED_TENSORRT=0
+		if use cuda; then
+			export TF_CUDA_PATHS="${EPREFIX%/}/opt/cuda"
+			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
+			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
+			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
+			einfo "Setting CUDA version: $TF_CUDA_VERSION"
+			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
+		fi
+
+		# com_googlesource_code_re2: weird branch using absl, doesn't work with released re2
+		local SYSLIBS=(
+			absl_py
+			astor_archive
+			boringssl
+			com_github_googleapis_googleapis
+			com_github_googlecloudplatform_google_cloud_cpp
+			com_google_protobuf
+			curl
+			cython
+			double_conversion
+			enum34_archive
+			flatbuffers
+			functools32_archive
+			gast_archive
+			gif
+			grpc
+			hwloc
+			icu
+			libjpeg_turbo
+			jsoncpp_git
+			keras_applications_archive
+			lmdb
+			nasm
+			nsync
+			opt_einsum_archive
+			org_sqlite
+			pasta
+			pcre
+			png
+			six_archive
+			snappy
+			swig
+			termcolor_archive
+			wrapt
+			zlib_archive
+		)
+
+		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
+		export TF_IGNORE_MAX_BAZEL_VERSION=1
+
+		# This is not autoconf
+		./configure || die
+
+		echo 'build --config=noaws --config=nohdfs' >> .bazelrc || die
+		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+		echo 'build --incompatible_no_support_tools_in_action_inputs=false' >> .bazelrc || die
+	}
+	if use python; then
+		python_foreach_impl run_in_build_dir do_configure
+	else
+		do_configure
+	fi
+}
+
+src_compile() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+
+	if use python; then
+		python_setup
+		BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}"
+	fi
+
+	# fail early if any deps are missing
+	ebazel build --nobuild \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so \
+		//tensorflow:libtensorflow_cc.so \
+		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
+
+	ebazel build \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so
+	ebazel build //tensorflow:libtensorflow_cc.so
+
+	do_compile() {
+		ebazel build //tensorflow/tools/pip_package:build_pip_package
+	}
+	BUILD_DIR="${S}"
+	cd "${BUILD_DIR}"
+	use python && python_foreach_impl run_in_build_dir do_compile
+	ebazel shutdown
+}
+
+src_install() {
+	local i j
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+
+	do_install() {
+		einfo "Installing ${EPYTHON} files"
+		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
+		mkdir -p "${srcdir}" || die
+		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+		cd "${srcdir}" || die
+		esetup.py install
+
+		# libtensorflow_framework.so is in /usr/lib already
+		python_export PYTHON_SITEDIR PYTHON_SCRIPTDIR
+		rm -f "${D}/${PYTHON_SITEDIR}"/${PN}/lib${PN}_framework.so* || die
+		rm -f "${D}/${PYTHON_SITEDIR}"/${PN}_core/lib${PN}_framework.so* || die
+		python_optimize
+	}
+
+	if use python; then
+		python_foreach_impl run_in_build_dir do_install
+
+		# Symlink to python-exec scripts
+		for i in "${ED}"/usr/lib/python-exec/*/*; do
+			n="${i##*/}"
+			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
+		done
+
+		python_setup
+		local BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	einfo "Installing headers"
+	ebazel build //tensorflow:install_headers
+	ebazel shutdown
+	insinto /usr/include/${PN}/
+	doins -r bazel-genfiles/tensorflow/include/*
+
+	einfo "Installing libs"
+	# Generate pkg-config file
+	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
+	insinto /usr/$(get_libdir)/pkgconfig
+	doins ${PN}.pc ${PN}_cc.pc
+
+	for l in libtensorflow{,_framework,_cc}.so; do
+		dolib.so bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
+	done
+
+	einstalldocs
+}
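
For reference, pkg_setup above scales CHECKREQS_DISK_BUILD with the number of enabled Python implementations (10G for the C/C++ libraries plus 6G per implementation); a minimal sketch of the arithmetic, assuming two implementations are enabled:

    # e.g. with python3_6 and python3_7 in PYTHON_TARGETS
    num_pythons_enabled=2
    echo "$((10 + 6 * num_pythons_enabled))G"   # -> 22G of build space required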



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2019-12-09  3:03 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2019-12-09  3:03 UTC (permalink / raw
  To: gentoo-commits

commit:     45cffd5350c6e32f774e63434ae12855340b2452
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Mon Dec  9 02:57:41 2019 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Mon Dec  9 03:00:05 2019 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=45cffd53

sci-libs/tensorflow: drop old

Package-Manager: Portage-2.3.79, Repoman-2.3.16
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 sci-libs/tensorflow/Manifest                       |  13 -
 .../tensorflow-1.15.0_rc0-0003-python-deps.patch   |  15 -
 sci-libs/tensorflow/tensorflow-1.14.0-r2.ebuild    | 340 -------------------
 sci-libs/tensorflow/tensorflow-1.15.0_rc0.ebuild   | 359 ---------------------
 4 files changed, 727 deletions(-)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 3dcac2f962e..1fccc3eaf93 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -1,24 +1,18 @@
 DIST ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz 100612 BLAKE2B 84f81072d3298dde613b4c1ebabf852c67e04b5882a4ca0ed6f218b42e8fa3ea9598140b0c0009e8d6b62d08902622a6d57ee005ec3ad8a808c9fb21aa53430c SHA512 03a289d8099a5c4a5298cba8f516d2edb41220935d8db750970113ca7513372c78241c2c948d0ffcd60e84686e50ba1d7fd0b46efea6881dd320435eca92a7b9
 DIST abseil-cpp-43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz 1281523 BLAKE2B 955802c74ed9baf222dcd824d2940147ae89edbc88f12bb8f54839776fe18bb18c0fde23b0d3142bb3be65a57930c96d03fdd243e8da07435eeb04329c31483d SHA512 c2ae1bd789251ec70cd0baafd92521e48fb1648aee8d9d84a8269436300848b6a9d7aead6bef8a12d3a9040f19535ac725ae961f12566500d309bc13dfee26a4
-DIST abseil-cpp-daf381e8535a1f1f1b8a75966a74e7cca63dee89.tar.gz 1275955 BLAKE2B eb6e0f9d55458a4ae6e8b86be89add9b0cc44d1ff5a11f6fa1ec93e2f46885c8ff87b8c1e674e1ba5ac13556ed303963864057f0e258fce41fa753d891846996 SHA512 56a827296e2948b90922766369f2040e04abd03320375240ad32fcc2925cdd1ffea312edcef1009a0c8abd79f521a61e433b7651dd4c2721d2b1ad13f48256de
 DIST backports.weakref-1.0rc1.tar.gz 7761 BLAKE2B 4cb2554a4b71bb1f8c41a790268511e4b382effc7e0328f74346d086de539a177111de9e2dabac19a44b2a1cdbf59a7425e30eee4caa4bfe66b0ca97f1f460a9 SHA512 f37e9eb0d9060d2e1588a941f623460bd4477d7e180b38b5cd46d9c1db1ee094ae63e4f5eeeb422823bf3d067f46c4124cb124a9e4ddb795bc0bfbc4802ab826
-DIST bazel-skylib-0.6.0.tar.gz 29330 BLAKE2B 53d23a81203c08cb5ab4adbd308bb6c9875df0f2d27c6f8832a27397578e9214ebcd95f63bd6c539d534ecb5f2316116063e3e5d1a36986cf94d167d7823234d SHA512 14ce6782509be7d3cb0fe438aaad579ad23bf328d566cab06a2008990041ee098b4602542cb67961d757573a8c272e5dddb143efd756cd6ec40486159329fa91
 DIST bazel-skylib.0.8.0.tar.gz 72941 BLAKE2B 1928989d3e5dc338fee974c5592a524adb492db172b1d2a849ed5f14f91dbb76606335d369e4b15c4eec41adfe3a23cdc5b4f3b2f1351a5e9c25103c002b96a2 SHA512 4729941cedca8ddb876a621b28221d3c17dbb5dd72af27e55f48274208f21db986c5e7cb8adb742c42124bfa8073eb782b49e2557cb3f3f95ca56833fef925c0
 DIST bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz 660374 BLAKE2B 549292aa3ad8a6e01c29391a123f3cbc84d1f714ff9c68bfc6c8df07fa0ba3b19c22e61627f227a8a484f6d7d6c59935fd7560c447559ff5e774ddf1437eefe1 SHA512 83531a2646a9f63557eff6fefaf551ee7e9f0ccdb31374d7a17c35b9f9d844b9b405597972fcd31b2b68e9da312511bbfd9c8fbb75222f5924e1d2500cd1c2a6
 DIST bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz 505681 BLAKE2B f16225eb11859f487335bddca115c9c2a05731f4bfe4240609a47e17b706d9a25b6b363b61404b4d5df0471e4dd9b4b9befbf78dbcb1c7c11e3f590039ff92b2 SHA512 7723baf2c710adb063375a33fd64e3941fae7e98c824764c266d5554cc0b0cbb1c91a5c2a9058bb4c8766ec404ca556199ce22965a6de4baee09cb740b7671c9
 DIST bazel_skylib-0.9.0.tar.gz 84644 BLAKE2B 40b69a524e632ea00d34b9cf805b882ac4c549dd465ef37f28e54729ff1b614a7610a4895cede4e0c938edba5b19f781f3c0bfdb62bde628ee0af21d6b679546 SHA512 af4bcb641fed05d15f2d0ffe0e3abdfe9c7a5deb9c53064992d6605d224ded1648b1bdfa293e161028913f9f2c178c68cc38bc16028892ca5b0f52b4e645209d
 DIST bazelbuild-rules_cc-0d5f3f2768c6ca2faca0079a997a97ce22997a0c.zip 72534 BLAKE2B 0bbcc2b3fefb8df84d9f72b13da59c377fcd657ead378f85f80a587f8750302236212ae5c58819933015e62425126bead0733d2924a567fb9c5cb14fb75271a8 SHA512 1818c555504ba89afff88d75fe8b4e1386b3665a3ad477c26b4e9504c8f52f398b6aaedbad41a4ab8b8d9b3f86e0992e7c83a6da70241685d7b79ba177af1035
 DIST bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz 464906 BLAKE2B 033d76b8081f4f987e64875ad5c8e7b8f894ec6be58c52ee02c4d31d4480fee02f3f432ea9c4630ad3f5d1163f820aff37f6493da797ec51b148b361ab3c8b25 SHA512 2cd841f4530503ed31fa6425cb749ef29f8a1071b5d55594644303233e58455783cb02402bc23d7104ef036745733087d43075a1fcdab2ac96cd1a9872a6ea4a
-DIST bazelbuild-rules_closure-cf1e44edb908e9616030cc83d085989b8e6cd6df.tar.gz 462798 BLAKE2B f461635702cd881828cf401c78fe0e64398c015d7f7824b369b0808d54224c633b6d8b699000e9e481ba094a93e01c33cc4360a541b2d520db10d3e9ef81007e SHA512 e85b13a50cbec60e4e574de9a3d3b646ca4bf045c55afcb41f2fd671202aac9e6d3e91c618f8923986ae142e615624718f3df0d1edb33a90e334f29667888d39
 DIST bazelbuild-rules_docker-v0.10.0.tar.gz 549649 BLAKE2B e7a537b21138a5c5d9ce360e46238f57c917d2dbf5dd17887607402227cbe7c5d68aead9db0ecdb74c09eed9dac26eb86e004a9020c251152d15beb48be0e0d7 SHA512 7802107f8520c88646728800637cce54dbcefc54aa4361288a5b46e403e41998bc23e16fbe94f23a2ca52e5c33fc255af65846963c2fd0139ca1195435ce1b03
 DIST bazelbuild-rules_swift.0.11.1.tar.gz 107453 BLAKE2B 8cd686b945b5c9de9c8bfcaca38cd7c513bb8b55b4584bc569463997e7c5abcac3be4a5a31ccf95a494d17f3078ec9b1052497dba228fa3159a0c0be2a83eb88 SHA512 3479d6d8ce9111a39b9ef2e014399abc904e432841570dd0380104d80e4d3a74b6ff5cc6d2b250bcd15b152efa29d08f9ed092f70fd8bafa6deaa6f847d7f626
 DIST bazelbuild-rules_swift.0.12.1.tar.gz 111674 BLAKE2B 7c116d6098aa0725034dce4c83628c31c85c36a6331b44636deb2efe0f689a5baee8081b33d65e3727a83bca865bcabedd6f8fa1fdb60e901a9b60840c52c018 SHA512 6136feb3dc0074f82500000cf5335f046f21ea3a1edcb4dcb22228c82b56ed6a334e4b03e4e26a4d25549581abcf7df22b60a2bccf823a51bf270051fe18d489
-DIST bazelbuild-rules_swift.0.9.0.tar.gz 105918 BLAKE2B 8f1bd8f5a468e3b37c614cf498a6cd8ceca55b255d6f096ebf88cd77e95148eab3fb5a128ed93b40f6d1ee8988b8dfd300fdf7061fc4ed4f1fb4ab25446ebe58 SHA512 68e985fa30b865317ccf59473cb52873d65159b866a54e35b3fbf83c135f3c750752663e962decbc8ebcd17b5784867d4d4767dd0b706f1182d4715291d8268b
 DIST cub-1.8.0.zip 602396 BLAKE2B a5e302a52e04f85ae8e1836c59e799532a8b961a8d10c08fe1241c9045f88e31f1ebda6d26124b85c6df40968e1c7b3985a39961bf7614535aafcab2da1a576a SHA512 6167c5be94989f88e05a07508cf69cf2c6b22a5ac733804043daa46bd86a44a15a758b1ffb811bab28b82039d027516ed85b3d9f7677a740a7e00ec9c2f81aed
 DIST eigen-049af2f56331.tar.gz 2485848 BLAKE2B 9d8a91782c9d40b7a6df101d2f43df25b5a3622bc02a7b9576929afb1f2f3d7c8b80671d24f1e31670883003b60c4cc0f3c9dadff0430d2ee047eb5122805edd SHA512 ab37106e9dfebbb42d7919c16b301887607080345b7ed63c8e3b8568489cfdc2c3ca0117a77905162cb3ba8324753040c841d2c96521fa4b9486f0778f1962df
 DIST eigen-49177915a14a.tar.gz 2496808 BLAKE2B 4e89b72a2ca121e9d1951a7179434c9eb70104ad45e63ae9811e10e28e32d51386acd2fe6938970404dcada8a377d1b3c07a68a98a68a036a6e85b2cf9ff4b48 SHA512 eb036225b41ffd8cc61ef99d2d22030c0d1e0f87eefcea4dd2b1fdb16a5316e0f4f17527983a00a8a684067654bf1f87c82cfaed5764829778660485ebbfa4b5
-DIST eigen-8071cda5714d.tar.gz 2486211 BLAKE2B d5c2bc19d66b4e8508e260ba34a7ebf64c605d0eba3363bf853baaf1329ded80511ee36d54db3e8921eb62a0eab3c3ad2a2d4eac39b5108c2415b20812d998c0 SHA512 364691a5db9d2bd4285e5d5d9b9b2326ffd2138868c33bc2b98975da2aeee03bec7b40654ecc20e0ecc34b2a6d45257c7b9f0055b7310c590cac4c951c2aa635
-DIST eigen-a0d250e79c79.tar.gz 2468318 BLAKE2B 8799adc1a13612820251cd22a7ddc347ee4711d583561a5df9735bd22ab727c0d8e7fdc894cd7e83b3b972d5adcb86f033bc089e1a4de5fe4e6257f78df88957 SHA512 864646a0b3fd8287fbf6943f4cc7f692a883e6164add21c97e98372641b32e5ceffadc825178b9d11e9504377b837d0e54a264b7a53b28e4f9fc2a23c616d2b3
 DIST eigen-afc120bc03bd.tar.gz 2546157 BLAKE2B 72e051d77cd22c26a4203cf244e87b1700b4dce664d7ee5f80d0dd86eb5557c93e57c50664f1d79b13796a77c5ccdf72203ec66e9376d383b5a3b056326c7b61 SHA512 61f57a22d2a7947c14cfde87946dc41528df696884502a13000ee5c9b964672a5a8d8c10f505f28ed7a2d39deb820c6818d191dc3ce13215b8b9775b33da2ed8
 DIST farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz 467122 BLAKE2B 8b9dd426f4b9f732df6c8c09d868d1b1cc006c395b1525c46ea91c75318b723358c8e64bb0d86a73aace2032eded93f0d80cc7c2c77fddd6a39e3402ab4f2cb7 SHA512 7f5110514a8cdc7ce48d33fd94ad67d96a24e42e11b203e4d0986f4190545f051b2a63f8e09f2ac38c3aa4a5c62d9a0e02cd8ce02045037381fb62a99f0dcd00
 DIST gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip 935628 BLAKE2B 09bd1dd1e367b1e5c4fe9a9525449c09460f19168bc03c57ad563aa27ec74a18f08b620a3dab6da4fa1a1448cdc7ce8bc8a089becc033b53fff4cc07593315ce SHA512 bd42a364f718f3b077cd9c840d626bfc1fd92ace98ce43329d8fa54700c9d28cb47568b6567e9b081b340af7be621c44be1ce06d4b8dccc4fb5d82c35b12f951
@@ -28,17 +22,10 @@ DIST llvm-7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz 50144723 BLAKE2B b7447
 DIST llvm-b7d166cebcf619a3691eed3f994384aab3d80fa6.tar.gz 49997477 BLAKE2B 66cbd71a4ad65768fb39fd622efd54a7821b83eb105871e82e0b99074960cb4ebd8197ef984b0be29d5178a7ce3c0d4839d2d611c96fe1507227f7f28fd7e156 SHA512 79c6d4986cedfc132f085d2794f295fdbefa772cb53d3870963d4851ea15ce8e2863aa50c903930abaff53d818ad97acfe8acd1c923ccbe26e3a99ac2397ae9f
 DIST llvm-ecc999101aadc8dc7d4af9fd88be10fe42674aa0.tar.gz 115788302 BLAKE2B ea18d6cb3cc8311a78e543996d88831e1ecf1dec58b7c890a80cd8fce86e1c52244b9cfc0d7ce0867480465c2d914555c16cf003bfdcf90056d63e8f4e0ea0c0 SHA512 5422762ffe1022ad4588bcf2bdce1a041c37b5edffed70fcefdd911b2dbf8b52cba9ec57670d48d7b69be6b71e354d333d6eaa5ac5260df56ed8ab18fd98fe88
 DIST nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz 107044 BLAKE2B fd018b1d40977add357e98382f41e3fa6456bdc9a181e2d6cc7a3bc7fb98d242b3216b5ed0c40323bbcc006ab5221b49819ebc1dccd0c8dd700d9504f085eed1 SHA512 e66f323abb68feeec41927cc23e3a85d82fd1b7105a6232626edba95acb9eb22b3fb8b28d1a7adb536c47c936f68271fe5d8f66d3059e3128608ad826dd7622e
-DIST nvidia-nccl-f93fe9bfd94884cec2ba711897222e0df5569a53.tar.gz 95617 BLAKE2B 023788db444b1a50f01ab16f126c920f530a6ff5bf2a9327626db45f985b4093c1024d16d43cdee9f0b3ee5c8b7d104f2a6b1dab0460c6b1998b42a0354f6f27 SHA512 d6b4dd4d6c7c9ca2dcf0699055c9193d4b46c0c5155e0108db38094577c92f954f604b2e83ca320873288e0010eaa3c5812f05f7fafe5112bebbca78e56cb279
-DIST oourafft-20061228.tgz 72213 BLAKE2B 4a3ac2b4c0bc3275b5743df59241e1cdbd0200371c153ddf54ef6c7c4ec523ee6560547e2d5ef9f3200037c0635bf41c18991ac35f271b1e600d0dbd65d1a9a7 SHA512 f1ceac00cb7b9eb8f625eee7f1f5eea8af363343589a344226628d68baf668c176e6c23b7f904c4e682330352eaa0cd5d00731340d208e94c9657b8f85ae2240
 DIST oourafft2d-20061228.tgz 54434 BLAKE2B 382dd5787155b877a2ad1d624596afb5dbbe146a7aedf044d7f016949deedac871a512a030b5e6ac9ea5310e216788ffb99af1dd9d2a0de3f1ad72d9e5e7a3db SHA512 af993f68e8e1eb3cb927a51e86da8f74cfafc912a7cd055515e50fe543dd19ab5a6f7b1c2be4a55d6f4a0e5d766ead34c3be4c5705be6353f78cb2a55bd5cf16
 DIST pybind11-v2.3.0.tar.gz 564520 BLAKE2B a7c1485aa8f63a00eccd3103cf9f531a080b049be02e6182b30503ae6dd6c55fc517684ed9d92aaec244efd5f77336c68d1ea546833f7f0ae2fd7b090c19a02d SHA512 04542fe9dca2ffdb86c15698a4bbc041446a9f11970092e89a482ae0ff64c932f62fff8f437b98bdaa006c1fccabddd79f0c7e376f27daed2150b1d2f988e8aa
 DIST re2-506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz 316237 BLAKE2B 8a975bc3e78b690f70944651c7022c6d946f40b2eb5fe01b2e203141f94b890cf1ad5c72af8e7c6ff3c5242d42af5a8657ed69e3c74d9ac081beb15de6334c0c SHA512 e3e00e92700d08bc55ba0e96148ddf1e00de2d329c1fdac08691ff15976937829f66dbc84af886a4b749ad66ba4ab208d66811e97e33bb9c2d7359735406c450
-DIST rules_docker-b8ff6a85ec359db3fd5657accd3e524daf12016d.tar.gz 460121 BLAKE2B bca31bbdd9069647fa9560002e63a404e2bd4d4ebd19d90e28d08f797f8f63157cc127dab26b996bb976e50f099928a91c1d6343427cb85048c6ca2b47c0c7e2 SHA512 461a663189e5995e31ee8175ee260e1ad06b00ee8a7548b9d3a946a2de693311031ec6eba89e3bad527f5d8f76ed25626ef4ea05a8d6e7579932dc1be3b0a6ba
-DIST tensorflow-1.14.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
-DIST tensorflow-1.14.0.tar.gz 41335204 BLAKE2B d91d84c5d86c838cc659c45e28cf3c8a9ec9a02f5854a1826680806dae41cdd00ea49a6d2c04f2019b3400c6e267c6ca6f3b28e43b244569dca174ff7f2c59d6 SHA512 ac9ea5a2d1c761aaafbdc335259e29c128127b8d069ec5b206067935180490aa95e93c7e13de57f7f54ce4ba4f34a822face22b4a028f60185edb380e5cd4787
 DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
 DIST tensorflow-1.15.0.tar.gz 46183168 BLAKE2B 5ccd87d5b53d6c543728e9c7ef96f12149bd8729620838b07a28f752fd5c57ef88350c2b62e8c44ec3561469eda38d8a64cbfca5e1ac7850674e1dd2c4d605d6 SHA512 f8683a950be6a3fa234d42e0f46bd9e049280e0c1d203c5c00ad44cf4728d894f2c301939d6d71203f815130a1ead53127adcc7565009589ff5aafc878b4dba7
-DIST tensorflow-1.15.0_rc0.tar.gz 46177850 BLAKE2B a4cb339018d04da26993400e99bacbe8c88dd68e17158b3effac125ee90b73d441084d530359912c40d1981c7688511ad41117067428233e9310c1fc8617f1d0 SHA512 f9ba5f2c952b7d8be0f3e2b78336513efa07d5323bc28217335391538577ba2b91687cf92f759d4ec03b02dd1c2961de4b54df205e7952c706f5b632029b7c7d
 DIST tensorflow-2.0.0.tar.gz 46176997 BLAKE2B 761e91a774f61b50459240a83fe52f723a76c2cc3eaf7c371684849cb7365b94fb3743ab8de9adffe64bef0f70da2bb0b7e6426a3f402e436eff841d3edefe8d SHA512 c3a2cb9673d2ade8e83961f7d944165123d0e4d94e6ca4d8fdee4f02f536893b879bbbf78d3ad5e557467b58f97f7194f2d9cfc64d7d05540969be732070eecc
 DIST tensorflow-2.1.0_rc0.tar.gz 41625131 BLAKE2B bc286f2f8d76a38973706acf3d839a4f54843b2f37ba5485eb149beacd55c7ac13660592acbc2413f8e60242c4a34b90cea8f1d9708d3f086e7cae73cc338832 SHA512 1481af1ed43ff03f6a0eeaad9896e2ec021d763bc69bf03e33353d6188074fdfaa1b6c3bba1d4e2496207f468e6a555fe2c962ae15eddd30ef872a3d76e19911
-DIST tensorflow-patches-1.14.0-r2.tar.bz2 7213 BLAKE2B 51719fc02049d564b7e5c00a76375af9a7e8dfc73753ba5f0d53163dc1fa00d23bb7ed03553a0bce68b8b90cd5bfe21d29e7a57a827db2c68ceef65f3cb0e925 SHA512 35e1a410060cdde99df148bfd06f042b91e021f7a692ae9c0100c67555de553bc248d73db762ed0239117296fb1147e2b67cdf0dd3dc94dee6fe9804c08a7360

diff --git a/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0003-python-deps.patch b/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0003-python-deps.patch
deleted file mode 100644
index 39fc269b73b..00000000000
--- a/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0003-python-deps.patch
+++ /dev/null
@@ -1,15 +0,0 @@
-diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
-index fec08f66d7..9d5d1f2a3f 100644
---- a/tensorflow/tools/pip_package/setup.py
-+++ b/tensorflow/tools/pip_package/setup.py
-@@ -62,8 +62,8 @@ REQUIRED_PACKAGES = [
-     'opt_einsum >= 2.3.2',
-     'six >= 1.10.0',
-     'protobuf >= 3.6.1',
--    'tb-nightly >= 1.15.0a20190821, < 1.15.0a20190822',
--    'tf-estimator-nightly >= 1.14.0.dev2019090401, < 1.14.0.dev2019090402',
-+    'tensorboard >= 1.15.0',
-+    'tensorflow_estimator >= 1.15.0',
-     'termcolor >= 1.1.0',
-     'wrapt >= 1.11.1',
- ]

diff --git a/sci-libs/tensorflow/tensorflow-1.14.0-r2.ebuild b/sci-libs/tensorflow/tensorflow-1.14.0-r2.ebuild
deleted file mode 100644
index 058b3c2aa83..00000000000
--- a/sci-libs/tensorflow/tensorflow-1.14.0-r2.ebuild
+++ /dev/null
@@ -1,340 +0,0 @@
-# Copyright 1999-2019 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-
-DISTUTILS_OPTIONAL=1
-PYTHON_COMPAT=( python{3_5,3_6,3_7} )
-MY_PV=${PV/_rc/-rc}
-MY_P=${PN}-${MY_PV}
-
-inherit bazel check-reqs cuda distutils-r1 flag-o-matic toolchain-funcs
-
-DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
-HOMEPAGE="https://www.tensorflow.org/"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-IUSE="cuda mpi +python"
-CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
-for i in $CPU_USE_FLAGS_X86; do
-	IUSE+=" cpu_flags_x86_$i"
-done
-
-# distfiles that bazel uses for the workspace, will be copied to basel-distdir
-bazel_external_uris="
-	http://www.kurims.kyoto-u.ac.jp/~ooura/fft.tgz -> oourafft-20061228.tgz
-	https://bitbucket.org/eigen/eigen/get/a0d250e79c79.tar.gz -> eigen-a0d250e79c79.tar.gz
-	https://github.com/abseil/abseil-cpp/archive/daf381e8535a1f1f1b8a75966a74e7cca63dee89.tar.gz -> abseil-cpp-daf381e8535a1f1f1b8a75966a74e7cca63dee89.tar.gz
-	https://github.com/bazelbuild/bazel-skylib/archive/0.6.0.tar.gz -> bazel-skylib-0.6.0.tar.gz
-	https://github.com/bazelbuild/rules_closure/archive/cf1e44edb908e9616030cc83d085989b8e6cd6df.tar.gz -> bazelbuild-rules_closure-cf1e44edb908e9616030cc83d085989b8e6cd6df.tar.gz
-	https://github.com/bazelbuild/rules_swift/releases/download/0.9.0/rules_swift.0.9.0.tar.gz -> bazelbuild-rules_swift.0.9.0.tar.gz
-	https://github.com/bazelbuild/rules_docker/archive/b8ff6a85ec359db3fd5657accd3e524daf12016d.tar.gz -> rules_docker-b8ff6a85ec359db3fd5657accd3e524daf12016d.tar.gz
-	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
-	https://github.com/google/gemmlowp/archive/12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip -> gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip
-	https://github.com/google/highwayhash/archive/fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz -> highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz
-	https://github.com/nlopezgi/bazel-toolchains/archive/94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz -> bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz
-	cuda? (
-		https://github.com/nvidia/nccl/archive/f93fe9bfd94884cec2ba711897222e0df5569a53.tar.gz -> nvidia-nccl-f93fe9bfd94884cec2ba711897222e0df5569a53.tar.gz
-		https://github.com/NVlabs/cub/archive/1.8.0.zip -> cub-1.8.0.zip
-	)
-	python? (
-		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz
-		http://mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.14.0-python-license.rst.txt
-		https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
-	)"
-
-SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
-		https://dev.gentoo.org/~perfinion/patches/tensorflow-patches-${PVR}.tar.bz2
-		${bazel_external_uris}"
-
-RDEPEND="
-	app-arch/snappy
-	dev-db/lmdb
-	dev-db/sqlite
-	dev-libs/icu
-	>=dev-libs/jsoncpp-1.9
-	dev-libs/libpcre
-	dev-libs/nsync
-	dev-libs/openssl:0=
-	>=dev-libs/protobuf-3.6.0:=
-	>=dev-libs/re2-0.2018.04.01
-	media-libs/giflib
-	media-libs/libjpeg-turbo
-	media-libs/libpng:0
-	>=net-libs/grpc-1.22.0
-	net-misc/curl
-	sys-libs/zlib
-	>=sys-apps/hwloc-2
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-		dev-libs/cudnn
-	)
-	mpi? ( virtual/mpi )
-	python? (
-		${PYTHON_DEPS}
-		>=dev-libs/flatbuffers-1.8.0
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
-		dev-python/gast[${PYTHON_USEDEP}]
-		dev-python/numpy[${PYTHON_USEDEP}]
-		dev-python/google-pasta[${PYTHON_USEDEP}]
-		>=dev-python/protobuf-python-3.6.0[${PYTHON_USEDEP}]
-		dev-python/six[${PYTHON_USEDEP}]
-		dev-python/termcolor[${PYTHON_USEDEP}]
-		>=dev-python/grpcio-1.22.0[${PYTHON_USEDEP}]
-		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
-		>=net-libs/google-cloud-cpp-0.10.0
-		>=sci-libs/keras-applications-1.0.6[${PYTHON_USEDEP}]
-		>=sci-libs/keras-preprocessing-1.0.5[${PYTHON_USEDEP}]
-		>=sci-visualization/tensorboard-1.13.0[${PYTHON_USEDEP}]
-	)"
-DEPEND="${RDEPEND}
-	dev-python/mock"
-PDEPEND="python? (
-		>=sci-libs/tensorflow-estimator-1.13.0[${PYTHON_USEDEP}]
-	)"
-BDEPEND="
-	app-arch/unzip
-	>=dev-libs/protobuf-3.6.0
-	dev-java/java-config
-	dev-python/mock
-	dev-lang/swig
-	dev-python/cython
-	|| (
-		=dev-util/bazel-0.24*
-		=dev-util/bazel-0.26*
-		=dev-util/bazel-0.27*
-	)
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-	)
-	!python? ( dev-lang/python )
-	python? (
-		>=dev-python/grpcio-tools-1.22.0
-	)"
-REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
-
-S="${WORKDIR}/${MY_P}"
-
-DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
-CHECKREQS_MEMORY="5G"
-CHECKREQS_DISK_BUILD="5G"
-
-get-cpu-flags() {
-	local i f=()
-	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
-	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
-		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
-	done
-	use cpu_flags_x86_fma3 && f+=( -mfma )
-	echo "${f[*]}"
-}
-
-pkg_setup() {
-	local num_pythons_enabled
-	num_pythons_enabled=0
-	count_impls(){
-		num_pythons_enabled=$((${num_pythons_enabled} + 1))
-	}
-	use python && python_foreach_impl count_impls
-
-	# 5 G to build C/C++ libs, 5G per python impl
-	CHECKREQS_DISK_BUILD="$((5 + 5 * $num_pythons_enabled))G"
-	check-reqs_pkg_setup
-}
-
-src_unpack() {
-	# Only unpack the main distfile
-	unpack "${P}.tar.gz"
-	unpack tensorflow-patches-${PVR}.tar.bz2
-	bazel_load_distfiles "${bazel_external_uris}"
-}
-
-src_prepare() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	append-flags $(get-cpu-flags)
-	bazel_setup_bazelrc
-
-	eapply "${WORKDIR}"/patches/*.patch
-
-	default
-	use python && python_copy_sources
-
-	use cuda && cuda_add_sandbox
-}
-
-src_configure() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_configure() {
-		export CC_OPT_FLAGS=" "
-		export TF_ENABLE_XLA=0
-		export TF_NEED_OPENCL_SYCL=0
-		export TF_NEED_OPENCL=0
-		export TF_NEED_COMPUTECPP=0
-		export TF_NEED_ROCM=0
-		export TF_NEED_MPI=$(usex mpi 1 0)
-		export TF_SET_ANDROID_WORKSPACE=0
-
-		if use python; then
-			python_export PYTHON_SITEDIR
-			export PYTHON_BIN_PATH="${PYTHON}"
-			export PYTHON_LIB_PATH="${PYTHON_SITEDIR}"
-		else
-			export PYTHON_BIN_PATH="$(which python)"
-			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
-		fi
-
-		export TF_NEED_CUDA=$(usex cuda 1 0)
-		export TF_DOWNLOAD_CLANG=0
-		export TF_CUDA_CLANG=0
-		export TF_NEED_TENSORRT=0
-		if use cuda; then
-			export TF_CUDA_PATHS="${EPREFIX%/}/opt/cuda"
-			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
-			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
-			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
-			einfo "Setting CUDA version: $TF_CUDA_VERSION"
-			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
-		fi
-
-		local SYSLIBS=(
-			absl_py
-			astor_archive
-			boringssl
-			com_github_googleapis_googleapis
-			com_github_googlecloudplatform_google_cloud_cpp
-			com_google_protobuf
-			com_google_protobuf_cc
-			com_googlesource_code_re2
-			curl
-			cython
-			double_conversion
-			enum34_archive
-			flatbuffers
-			gast_archive
-			gif_archive
-			grpc
-			hwloc
-			icu
-			jpeg
-			jsoncpp_git
-			keras_applications_archive
-			lmdb
-			nasm
-			nsync
-			org_sqlite
-			pasta
-			pcre
-			png_archive
-			protobuf_archive
-			six_archive
-			snappy
-			swig
-			termcolor_archive
-			wrapt
-			zlib_archive
-		)
-
-		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
-		export TF_IGNORE_MAX_BAZEL_VERSION=1
-
-		# This is not autoconf
-		./configure || die
-
-		echo 'build --config=noaws --config=nohdfs --config=noignite --config=nokafka' >> .bazelrc || die
-		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
-		echo 'build --incompatible_no_support_tools_in_action_inputs=false' >> .bazelrc || die
-	}
-	if use python; then
-		python_foreach_impl run_in_build_dir do_configure
-	else
-		do_configure
-	fi
-}
-
-src_compile() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	if use python; then
-		python_setup
-		BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}"
-	fi
-
-	# fail early if any deps are missing
-	ebazel build --nobuild \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so \
-		//tensorflow:libtensorflow_cc.so \
-		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
-
-	ebazel build \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so
-	ebazel build //tensorflow:libtensorflow_cc.so
-
-	do_compile() {
-		ebazel build //tensorflow/tools/pip_package:build_pip_package
-	}
-	BUILD_DIR="${S}"
-	cd "${BUILD_DIR}"
-	use python && python_foreach_impl run_in_build_dir do_compile
-	ebazel shutdown
-}
-
-src_install() {
-	local i j
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_install() {
-		einfo "Installing ${EPYTHON} files"
-		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
-		mkdir -p "${srcdir}" || die
-		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
-		cd "${srcdir}" || die
-		esetup.py install
-
-		# libtensorflow_framework.so is in /usr/lib already
-		python_export PYTHON_SITEDIR PYTHON_SCRIPTDIR
-		rm -f "${D}/${PYTHON_SITEDIR}"/${PN}/lib${PN}_framework.so* || die
-		python_optimize
-	}
-
-	if use python; then
-		python_foreach_impl run_in_build_dir do_install
-
-		# Symlink to python-exec scripts
-		for i in "${ED}"/usr/lib/python-exec/*/*; do
-			n="${i##*/}"
-			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
-		done
-
-		python_setup
-		local BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}" || die
-	fi
-
-	einfo "Installing headers"
-	ebazel build //tensorflow:install_headers
-	ebazel shutdown
-	insinto /usr/include/${PN}/
-	doins -r bazel-genfiles/tensorflow/include/*
-
-	einfo "Installing libs"
-	# Generate pkg-config file
-	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
-	insinto /usr/$(get_libdir)/pkgconfig
-	doins ${PN}.pc ${PN}_cc.pc
-
-	for l in libtensorflow{,_framework,_cc}.so; do
-		dolib.so bazel-bin/tensorflow/${l}
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
-	done
-
-	einstalldocs
-}
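
Both the removed and the current ebuilds unbundle third-party dependencies by exporting TF_SYSTEM_LIBS before running ./configure; outside of an ebuild the same mechanism looks roughly like this (a sketch, with an arbitrary subset of libraries):

    # ask TensorFlow's build to use system copies instead of the bundled ones;
    # the names must match entries in VALID_LIBS in third_party/systemlibs/syslibs_configure.bzl
    SYSLIBS=( curl grpc jsoncpp_git zlib_archive )
    export TF_SYSTEM_LIBS="${SYSLIBS[*]}"   # space-separated list
    ./configure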

diff --git a/sci-libs/tensorflow/tensorflow-1.15.0_rc0.ebuild b/sci-libs/tensorflow/tensorflow-1.15.0_rc0.ebuild
deleted file mode 100644
index 0c9fba045c5..00000000000
--- a/sci-libs/tensorflow/tensorflow-1.15.0_rc0.ebuild
+++ /dev/null
@@ -1,359 +0,0 @@
-# Copyright 1999-2019 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-
-DISTUTILS_OPTIONAL=1
-PYTHON_COMPAT=( python{3_5,3_6,3_7} )
-MY_PV=${PV/_rc/-rc}
-MY_P=${PN}-${MY_PV}
-
-inherit bazel check-reqs cuda distutils-r1 flag-o-matic toolchain-funcs
-
-DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
-HOMEPAGE="https://www.tensorflow.org/"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-IUSE="cuda mpi +python xla"
-CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
-for i in $CPU_USE_FLAGS_X86; do
-	IUSE+=" cpu_flags_x86_$i"
-done
-
-# distfiles that bazel uses for the workspace, will be copied to basel-distdir
-bazel_external_uris="
-	http://www.kurims.kyoto-u.ac.jp/~ooura/fft2d.tgz -> oourafft2d-20061228.tgz
-	https://bitbucket.org/eigen/eigen/get/8071cda5714d.tar.gz -> eigen-8071cda5714d.tar.gz
-	https://github.com/abseil/abseil-cpp/archive/43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz -> abseil-cpp-43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz
-	https://github.com/bazelbuild/bazel-skylib/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz
-	https://github.com/bazelbuild/bazel-toolchains/archive/92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz -> bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz
-	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
-	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
-	https://github.com/bazelbuild/rules_swift/releases/download/0.11.1/rules_swift.0.11.1.tar.gz -> bazelbuild-rules_swift.0.11.1.tar.gz
-	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
-	https://github.com/google/gemmlowp/archive/12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip -> gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip
-	https://github.com/google/highwayhash/archive/fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz -> highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz
-	https://github.com/mborgerding/kissfft/archive/36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz -> kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz
-	https://github.com/nlopezgi/bazel-toolchains/archive/94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz -> bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz
-	https://github.com/pybind/pybind11/archive/v2.3.0.tar.gz -> pybind11-v2.3.0.tar.gz
-	https://github.com/llvm-mirror/llvm/archive/7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz -> llvm-7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz
-	cuda? (
-		https://github.com/nvidia/nccl/archive/0ceaec9cee96ae7658aa45686853286651f36384.tar.gz -> nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz
-		https://github.com/NVlabs/cub/archive/1.8.0.zip -> cub-1.8.0.zip
-	)
-	python? (
-		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz
-		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
-		https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
-	)"
-
-SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
-		${bazel_external_uris}"
-
-RDEPEND="
-	app-arch/snappy
-	dev-db/lmdb
-	dev-db/sqlite
-	dev-libs/double-conversion
-	dev-libs/icu
-	~dev-libs/jsoncpp-1.9.1
-	dev-libs/libpcre
-	dev-libs/nsync
-	dev-libs/openssl:0=
-	>=dev-libs/protobuf-3.6.1:=
-	>=dev-libs/re2-0.2018.04.01
-	media-libs/giflib
-	media-libs/libjpeg-turbo
-	media-libs/libpng:0
-	>=net-libs/grpc-1.22.0
-	net-misc/curl
-	sys-libs/zlib
-	>=sys-apps/hwloc-2
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-		dev-libs/cudnn
-	)
-	mpi? ( virtual/mpi )
-	python? (
-		${PYTHON_DEPS}
-		>=dev-libs/flatbuffers-1.8.0
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
-		dev-python/gast[${PYTHON_USEDEP}]
-		>=dev-python/numpy-1.16[${PYTHON_USEDEP}]
-		dev-python/google-pasta[${PYTHON_USEDEP}]
-		dev-python/opt-einsum[${PYTHON_USEDEP}]
-		>=dev-python/protobuf-python-3.6.1[${PYTHON_USEDEP}]
-		dev-python/six[${PYTHON_USEDEP}]
-		dev-python/termcolor[${PYTHON_USEDEP}]
-		>=dev-python/grpcio-1.22.0[${PYTHON_USEDEP}]
-		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
-		>=net-libs/google-cloud-cpp-0.10.0
-		>=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
-		>=sci-libs/keras-preprocessing-1.0.5[${PYTHON_USEDEP}]
-		>=sci-visualization/tensorboard-1.15.0[${PYTHON_USEDEP}]
-	)"
-DEPEND="${RDEPEND}
-	dev-python/mock"
-PDEPEND="python? (
-		>=sci-libs/tensorflow-estimator-1.15.0[${PYTHON_USEDEP}]
-	)"
-BDEPEND="
-	app-arch/unzip
-	>=dev-libs/protobuf-3.6.0
-	dev-java/java-config
-	dev-python/mock
-	dev-lang/swig
-	dev-python/cython
-	|| (
-		=dev-util/bazel-0.24*
-		=dev-util/bazel-0.27*
-	)
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-	)
-	!python? ( dev-lang/python )
-	python? (
-		>=dev-python/grpcio-tools-1.22.0
-	)"
-REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
-
-S="${WORKDIR}/${MY_P}"
-
-PATCHES=(
-	"${FILESDIR}/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
-	"${FILESDIR}/tensorflow-1.15.0_rc0-0002-systemlibs-unbundle-functools32.patch"
-	"${FILESDIR}/tensorflow-1.15.0_rc0-0003-python-deps.patch"
-)
-DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
-CHECKREQS_MEMORY="5G"
-CHECKREQS_DISK_BUILD="5G"
-
-get-cpu-flags() {
-	local i f=()
-	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
-	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
-		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
-	done
-	use cpu_flags_x86_fma3 && f+=( -mfma )
-	echo "${f[*]}"
-}
-
-pkg_setup() {
-	local num_pythons_enabled
-	num_pythons_enabled=0
-	count_impls(){
-		num_pythons_enabled=$((${num_pythons_enabled} + 1))
-	}
-	use python && python_foreach_impl count_impls
-
-	# 5 G to build C/C++ libs, 5G per python impl
-	CHECKREQS_DISK_BUILD="$((5 + 5 * $num_pythons_enabled))G"
-	check-reqs_pkg_setup
-}
-
-src_unpack() {
-	# Only unpack the main distfile
-	unpack "${P}.tar.gz"
-	bazel_load_distfiles "${bazel_external_uris}"
-}
-
-src_prepare() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	append-flags $(get-cpu-flags)
-	bazel_setup_bazelrc
-
-	default
-	use python && python_copy_sources
-
-	use cuda && cuda_add_sandbox
-}
-
-src_configure() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_configure() {
-		export CC_OPT_FLAGS=" "
-		export TF_ENABLE_XLA=$(usex xla 1 0)
-		export TF_NEED_OPENCL_SYCL=0
-		export TF_NEED_OPENCL=0
-		export TF_NEED_COMPUTECPP=0
-		export TF_NEED_ROCM=0
-		export TF_NEED_MPI=$(usex mpi 1 0)
-		export TF_SET_ANDROID_WORKSPACE=0
-
-		if use python; then
-			python_export PYTHON_SITEDIR
-			export PYTHON_BIN_PATH="${PYTHON}"
-			export PYTHON_LIB_PATH="${PYTHON_SITEDIR}"
-		else
-			export PYTHON_BIN_PATH="$(which python)"
-			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
-		fi
-
-		export TF_NEED_CUDA=$(usex cuda 1 0)
-		export TF_DOWNLOAD_CLANG=0
-		export TF_CUDA_CLANG=0
-		export TF_NEED_TENSORRT=0
-		if use cuda; then
-			export TF_CUDA_PATHS="${EPREFIX%/}/opt/cuda"
-			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
-			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
-			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
-			einfo "Setting CUDA version: $TF_CUDA_VERSION"
-			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
-
-			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
-				ewarn "WARNING: Tensorflow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
-				ewarn "These may not be optimal for your GPU."
-				ewarn ""
-				ewarn "To configure Tensorflow with the CUDA compute capability that is optimal for your GPU,"
-				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
-				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
-				ewarn ""
-				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
-				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
-			fi
-		fi
-
-		local SYSLIBS=(
-			absl_py
-			astor_archive
-			boringssl
-			com_github_googleapis_googleapis
-			com_github_googlecloudplatform_google_cloud_cpp
-			com_google_protobuf
-			com_googlesource_code_re2
-			curl
-			cython
-			double_conversion
-			enum34_archive
-			flatbuffers
-			functools32_archive
-			gast_archive
-			gif_archive
-			grpc
-			hwloc
-			icu
-			jpeg
-			jsoncpp_git
-			keras_applications_archive
-			lmdb
-			nasm
-			nsync
-			opt_einsum_archive
-			org_sqlite
-			pasta
-			pcre
-			png_archive
-			six_archive
-			snappy
-			swig
-			termcolor_archive
-			wrapt
-			zlib_archive
-		)
-
-		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
-		export TF_IGNORE_MAX_BAZEL_VERSION=1
-
-		# This is not autoconf
-		./configure || die
-
-		echo 'build --config=noaws --config=nohdfs --config=noignite --config=nokafka' >> .bazelrc || die
-		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
-		echo 'build --incompatible_no_support_tools_in_action_inputs=false' >> .bazelrc || die
-	}
-	if use python; then
-		python_foreach_impl run_in_build_dir do_configure
-	else
-		do_configure
-	fi
-}
-
-src_compile() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	if use python; then
-		python_setup
-		BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}"
-	fi
-
-	# fail early if any deps are missing
-	ebazel build --nobuild \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so \
-		//tensorflow:libtensorflow_cc.so \
-		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
-
-	ebazel build \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so
-	ebazel build //tensorflow:libtensorflow_cc.so
-
-	do_compile() {
-		ebazel build //tensorflow/tools/pip_package:build_pip_package
-	}
-	BUILD_DIR="${S}"
-	cd "${BUILD_DIR}"
-	use python && python_foreach_impl run_in_build_dir do_compile
-	ebazel shutdown
-}
-
-src_install() {
-	local i j
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_install() {
-		einfo "Installing ${EPYTHON} files"
-		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
-		mkdir -p "${srcdir}" || die
-		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
-		cd "${srcdir}" || die
-		esetup.py install
-
-		# libtensorflow_framework.so is in /usr/lib already
-		python_export PYTHON_SITEDIR PYTHON_SCRIPTDIR
-		rm -f "${D}/${PYTHON_SITEDIR}"/${PN}/lib${PN}_framework.so* || die
-		rm -f "${D}/${PYTHON_SITEDIR}"/${PN}_core/lib${PN}_framework.so* || die
-		python_optimize
-	}
-
-	if use python; then
-		python_foreach_impl run_in_build_dir do_install
-
-		# Symlink to python-exec scripts
-		for i in "${ED}"/usr/lib/python-exec/*/*; do
-			n="${i##*/}"
-			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
-		done
-
-		python_setup
-		local BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}" || die
-	fi
-
-	einfo "Installing headers"
-	ebazel build //tensorflow:install_headers
-	ebazel shutdown
-	insinto /usr/include/${PN}/
-	doins -r bazel-genfiles/tensorflow/include/*
-
-	einfo "Installing libs"
-	# Generate pkg-config file
-	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
-	insinto /usr/$(get_libdir)/pkgconfig
-	doins ${PN}.pc ${PN}_cc.pc
-
-	for l in libtensorflow{,_framework,_cc}.so; do
-		dolib.so bazel-bin/tensorflow/${l}
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
-	done
-
-	einstalldocs
-}



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2020-08-30  0:26 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2020-08-30  0:26 UTC (permalink / raw
  To: gentoo-commits

commit:     1b40fafc11d0fa99a1c5b97111b5e23ba421c0b4
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Sat Aug 29 22:04:18 2020 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Sun Aug 30 00:25:11 2020 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=1b40fafc

sci-libs/tensorflow: Drop 1.15 and 2.0

Package-Manager: Portage-2.3.103, Repoman-2.3.23
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 sci-libs/tensorflow/Manifest                       |   8 -
 ...dd-rules-docker-http_archive-bazel-toolch.patch |  38 ---
 ..._rc0-0002-systemlibs-unbundle-functools32.patch |  61 ----
 .../tensorflow-2.1.0-cuda_10.2_support_bin2c.patch |  12 -
 sci-libs/tensorflow/tensorflow-1.15.0.ebuild       | 360 ---------------------
 sci-libs/tensorflow/tensorflow-2.0.0.ebuild        | 359 --------------------
 6 files changed, 838 deletions(-)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 96ff16f223c..1640aa58f4d 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -3,7 +3,6 @@ DIST abseil-cpp-43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz 1281523 BLAKE2B
 DIST backports.weakref-1.0rc1.tar.gz 7761 BLAKE2B 4cb2554a4b71bb1f8c41a790268511e4b382effc7e0328f74346d086de539a177111de9e2dabac19a44b2a1cdbf59a7425e30eee4caa4bfe66b0ca97f1f460a9 SHA512 f37e9eb0d9060d2e1588a941f623460bd4477d7e180b38b5cd46d9c1db1ee094ae63e4f5eeeb422823bf3d067f46c4124cb124a9e4ddb795bc0bfbc4802ab826
 DIST bazel-skylib.0.8.0.tar.gz 72941 BLAKE2B 1928989d3e5dc338fee974c5592a524adb492db172b1d2a849ed5f14f91dbb76606335d369e4b15c4eec41adfe3a23cdc5b4f3b2f1351a5e9c25103c002b96a2 SHA512 4729941cedca8ddb876a621b28221d3c17dbb5dd72af27e55f48274208f21db986c5e7cb8adb742c42124bfa8073eb782b49e2557cb3f3f95ca56833fef925c0
 DIST bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz 660374 BLAKE2B 549292aa3ad8a6e01c29391a123f3cbc84d1f714ff9c68bfc6c8df07fa0ba3b19c22e61627f227a8a484f6d7d6c59935fd7560c447559ff5e774ddf1437eefe1 SHA512 83531a2646a9f63557eff6fefaf551ee7e9f0ccdb31374d7a17c35b9f9d844b9b405597972fcd31b2b68e9da312511bbfd9c8fbb75222f5924e1d2500cd1c2a6
-DIST bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz 505681 BLAKE2B f16225eb11859f487335bddca115c9c2a05731f4bfe4240609a47e17b706d9a25b6b363b61404b4d5df0471e4dd9b4b9befbf78dbcb1c7c11e3f590039ff92b2 SHA512 7723baf2c710adb063375a33fd64e3941fae7e98c824764c266d5554cc0b0cbb1c91a5c2a9058bb4c8766ec404ca556199ce22965a6de4baee09cb740b7671c9
 DIST bazel_skylib-0.9.0.tar.gz 84644 BLAKE2B 40b69a524e632ea00d34b9cf805b882ac4c549dd465ef37f28e54729ff1b614a7610a4895cede4e0c938edba5b19f781f3c0bfdb62bde628ee0af21d6b679546 SHA512 af4bcb641fed05d15f2d0ffe0e3abdfe9c7a5deb9c53064992d6605d224ded1648b1bdfa293e161028913f9f2c178c68cc38bc16028892ca5b0f52b4e645209d
 DIST bazelbuild-apple_support.0.7.1.tar.gz 20070 BLAKE2B e72098fc5017e3371b6af0f33f392a42da18376052f30db5dd602c5ccc0176c1ac8002cca3f598845c5cb33406b4fd8d30c5ea47c69add6c530b75dc6566a4b0 SHA512 bbab25c229404123d7d424fa261480cf887c749ae729cc1a00bd6c2554d4b4b3fa81e0eaa670542b3a5d3301f3bd5dcd7727d011ba854f13dbbf4f65895e4b08
 DIST bazelbuild-rules_apple.0.18.0.tar.gz 1333446 BLAKE2B c7667e05c236f4de44573707c795d31c980242a2ea6eb69261403e7b1b7cc852d899c9b76cba347df1b1006542f9d532d78586a3bf5f831d37f8f0c0cd79a8eb SHA512 209b21856642b03dc7d54c0c82826224167df7da4c6dda3d29490e0661cdfe35ad9abe763288e0ff80af4d681a361efb22e0ca45ead6fb96fd38f211bcb4435d
@@ -13,12 +12,9 @@ DIST bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz 46
 DIST bazelbuild-rules_docker-v0.10.0.tar.gz 549649 BLAKE2B e7a537b21138a5c5d9ce360e46238f57c917d2dbf5dd17887607402227cbe7c5d68aead9db0ecdb74c09eed9dac26eb86e004a9020c251152d15beb48be0e0d7 SHA512 7802107f8520c88646728800637cce54dbcefc54aa4361288a5b46e403e41998bc23e16fbe94f23a2ca52e5c33fc255af65846963c2fd0139ca1195435ce1b03
 DIST bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip 9422 BLAKE2B bbc45c97551ee8126335b611ffca6574dac843d4db9b3221b10d26487f8eecfe38ba0cd67a4039a3ad921c25fea6294c43e4f4e7a57a060a30791acd6c4f9d39 SHA512 ba06242feb711e7fb6e821fd9ac029248b4f3e466fb1acf971d0db8196f3efe4d114ef137acbe4913073f8cbe8ccab4d47d8bafa21b867b2d6a6d57f5a647f0c
 DIST bazelbuild-rules_python-0.0.1.tar.gz 2302092 BLAKE2B 1db52eebf2461d779f764f2afdd070d1d0dd65eb2b83ccd98c2831da1784614ca281b114064729a9f257c64eceb62975aac8362d231c84f32abdf19aee7a1852 SHA512 40fa069a4482e2f83e29dc8e109652d14d187b2ec8efdcd36e98d117de93d66a938ed74999b42a2293fcb6eccc0a111cbbcf65c5c155579214bb1b96644280a5
-DIST bazelbuild-rules_swift.0.11.1.tar.gz 107453 BLAKE2B 8cd686b945b5c9de9c8bfcaca38cd7c513bb8b55b4584bc569463997e7c5abcac3be4a5a31ccf95a494d17f3078ec9b1052497dba228fa3159a0c0be2a83eb88 SHA512 3479d6d8ce9111a39b9ef2e014399abc904e432841570dd0380104d80e4d3a74b6ff5cc6d2b250bcd15b152efa29d08f9ed092f70fd8bafa6deaa6f847d7f626
 DIST bazelbuild-rules_swift.0.12.1.tar.gz 111674 BLAKE2B 7c116d6098aa0725034dce4c83628c31c85c36a6331b44636deb2efe0f689a5baee8081b33d65e3727a83bca865bcabedd6f8fa1fdb60e901a9b60840c52c018 SHA512 6136feb3dc0074f82500000cf5335f046f21ea3a1edcb4dcb22228c82b56ed6a334e4b03e4e26a4d25549581abcf7df22b60a2bccf823a51bf270051fe18d489
 DIST cub-1.8.0.zip 602396 BLAKE2B a5e302a52e04f85ae8e1836c59e799532a8b961a8d10c08fe1241c9045f88e31f1ebda6d26124b85c6df40968e1c7b3985a39961bf7614535aafcab2da1a576a SHA512 6167c5be94989f88e05a07508cf69cf2c6b22a5ac733804043daa46bd86a44a15a758b1ffb811bab28b82039d027516ed85b3d9f7677a740a7e00ec9c2f81aed
 DIST dlpack-3efc489b55385936531a06ff83425b719387ec63.tar.gz 39637 BLAKE2B 532a3ce3d0e354a73c19433f0210a64ecbf00d9134d06a0ee7462baaff3f2b981524aa630cf250595671d6673105724482c7d69e803179aac247f2020c883bd0 SHA512 3728db5213e887fd946f64ba5dc0c336be9a72a0966ee83f146148749ffc93ddb342e0735df6117d7943874c18f87de5791a28e4a57ea420bb8a4e2a5b7338bc
-DIST eigen-049af2f56331.tar.gz 2485848 BLAKE2B 9d8a91782c9d40b7a6df101d2f43df25b5a3622bc02a7b9576929afb1f2f3d7c8b80671d24f1e31670883003b60c4cc0f3c9dadff0430d2ee047eb5122805edd SHA512 ab37106e9dfebbb42d7919c16b301887607080345b7ed63c8e3b8568489cfdc2c3ca0117a77905162cb3ba8324753040c841d2c96521fa4b9486f0778f1962df
-DIST eigen-49177915a14a.tar.gz 2496808 BLAKE2B 4e89b72a2ca121e9d1951a7179434c9eb70104ad45e63ae9811e10e28e32d51386acd2fe6938970404dcada8a377d1b3c07a68a98a68a036a6e85b2cf9ff4b48 SHA512 eb036225b41ffd8cc61ef99d2d22030c0d1e0f87eefcea4dd2b1fdb16a5316e0f4f17527983a00a8a684067654bf1f87c82cfaed5764829778660485ebbfa4b5
 DIST eigen-4e696901f873a2347f76d931cf2f701e31e15d05.tar.gz 2580537 BLAKE2B 927897c80f3529da40a76ddde26113403926d730fa4539aa9e4121406e8d3852abcfdbc55d178723300e9daa26176f6170db06bb49a59cb190e8c7e6dfdea4dd SHA512 d5461e615044aa94b6ac9ab2815536f4bdf2b9ccfeb578cc7bc62554ba928b4192450f72f1350f3231ed44d2bb90e95218de9a5d0bcee9ec02867499a6dc5db0
 DIST eigen-52a2fbbb008a47c5e3fb8ac1c65c2feecb0c511c.tar.gz 2585583 BLAKE2B a2ce9da5f962f891cba748f46ce43667ae2b7fef54d4cc83411eafbc24b331e586c44fb135a2c3597c77fe62b0c8ff1172ae5ce6876f00ddd6f6ef2eca802d9c SHA512 65a73d26474547db83b9b40a133dd2a021e0f5a459104f63124461b4aac28bf3579dc81475441b3576901f28f704d6d4624939b22ecfbfbec896148c4c38da7c
 DIST farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz 467122 BLAKE2B 8b9dd426f4b9f732df6c8c09d868d1b1cc006c395b1525c46ea91c75318b723358c8e64bb0d86a73aace2032eded93f0d80cc7c2c77fddd6a39e3402ab4f2cb7 SHA512 7f5110514a8cdc7ce48d33fd94ad67d96a24e42e11b203e4d0986f4190545f051b2a63f8e09f2ac38c3aa4a5c62d9a0e02cd8ce02045037381fb62a99f0dcd00
@@ -26,8 +22,6 @@ DIST gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip 935628 BLAKE2B 09bd1d
 DIST highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz 138574 BLAKE2B cecd95039fc31545ced38a1949e7cb31d6f34deef7c8923b573aa7a632576d0a9b60fd24fbc8a40b8a840a7d2442ea28382fe842178a77467772ed4ba8a2cfb6 SHA512 d25162ed266a8c9c00e6978fcf642cf924f03c0d196f971d0d6f6a3454ca7dfc82f758eabd038ac08e0a5dacc51eccb9b674f588f64692f37cb7a17150aefe4f
 DIST kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz 44744 BLAKE2B aeea5d23c122417ddb70a44354890242c4fb61c456fba9af79054293a1355939af51a681760ac27be55423f888649fd2826e634422c7e9cb3749a9d7f50ef990 SHA512 30f4fcd351bbcb950a27e2a77ca100ccdfd37deb17a05e7e31c3e8f1c56d7c626ab81bbd7497484e6b512e41f7b1bed714722a194b2218c669b297911101df7b
 DIST llvm-387c3f74fd8efdc0be464b0e1a8033cc1eeb739c.tar.gz 115936989 BLAKE2B 624b4a898321e62002cbf3955f844755b3ba6b1c8acf9a310ddac19cf21f9627902eb0b3684e42bac1fe835be972322a5cc667f78a6e2b0e699f956e0cab81e8 SHA512 09ec39ec7a25703bbb47befb406be3de6d15232c971fa186e33dff80192876a827e35523a9782365e26a90abd42b32b9a2ee1443af71cc23cd825b3ff05df629
-DIST llvm-7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz 50144723 BLAKE2B b744756c9782e5506fd6628531409e305bd9ce9a79f1776c4fe674cec5b01a86ab1793dc602f704d295587c3d769d622402a4fe07c0d66269c7f610178693bce SHA512 43f185c8dd38ae7cb048bb9c98fb94e1f43ce66764b43a7141a06a1a0b011389fa139f34b7eaee72c39b8f44aeeff3e7a2189c11d10f8f8a93e9792a1acf2b5f
-DIST llvm-b7d166cebcf619a3691eed3f994384aab3d80fa6.tar.gz 49997477 BLAKE2B 66cbd71a4ad65768fb39fd622efd54a7821b83eb105871e82e0b99074960cb4ebd8197ef984b0be29d5178a7ce3c0d4839d2d611c96fe1507227f7f28fd7e156 SHA512 79c6d4986cedfc132f085d2794f295fdbefa772cb53d3870963d4851ea15ce8e2863aa50c903930abaff53d818ad97acfe8acd1c923ccbe26e3a99ac2397ae9f
 DIST llvm-ecc999101aadc8dc7d4af9fd88be10fe42674aa0.tar.gz 115788302 BLAKE2B ea18d6cb3cc8311a78e543996d88831e1ecf1dec58b7c890a80cd8fce86e1c52244b9cfc0d7ce0867480465c2d914555c16cf003bfdcf90056d63e8f4e0ea0c0 SHA512 5422762ffe1022ad4588bcf2bdce1a041c37b5edffed70fcefdd911b2dbf8b52cba9ec57670d48d7b69be6b71e354d333d6eaa5ac5260df56ed8ab18fd98fe88
 DIST nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz 107044 BLAKE2B fd018b1d40977add357e98382f41e3fa6456bdc9a181e2d6cc7a3bc7fb98d242b3216b5ed0c40323bbcc006ab5221b49819ebc1dccd0c8dd700d9504f085eed1 SHA512 e66f323abb68feeec41927cc23e3a85d82fd1b7105a6232626edba95acb9eb22b3fb8b28d1a7adb536c47c936f68271fe5d8f66d3059e3128608ad826dd7622e
 DIST nvidia-nccl-3701130b3c1bcdb01c14b3cb70fe52498c1e82b7.tar.gz 123898 BLAKE2B 9a268e089877fde9ac900b43bbf829f7dedfa701b017568647e3564670d5457dc18c96846a2cc8674b243c0c7d97d322e7466167c322588af3ad4f2d189b175b SHA512 9b090a963213eb088edf247e72b51a6cc79669f507831b88205c75a006a43f3d2257c4e75a34c33987c7d95c421e2273642db4c716b02907e8de1b1ef5d080f0
@@ -36,8 +30,6 @@ DIST pybind11-v2.3.0.tar.gz 564520 BLAKE2B a7c1485aa8f63a00eccd3103cf9f531a080b0
 DIST re2-506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz 316237 BLAKE2B 8a975bc3e78b690f70944651c7022c6d946f40b2eb5fe01b2e203141f94b890cf1ad5c72af8e7c6ff3c5242d42af5a8657ed69e3c74d9ac081beb15de6334c0c SHA512 e3e00e92700d08bc55ba0e96148ddf1e00de2d329c1fdac08691ff15976937829f66dbc84af886a4b749ad66ba4ab208d66811e97e33bb9c2d7359735406c450
 DIST sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz 2569603 BLAKE2B cd66dc8a0b4ad3ea586a79ef588c8d68e4f05b6ea89e6d8119b5ee338b9046c11a747ca57c2f80a3c90fab119c05e85f5965571c9e522ccb8a312b95804d1a36 SHA512 b819d050bb38246b503f1eb3d6e7d878d0f166f3602ae3c327749b1f6ddc2b1d6ac7f768b1f1d055b98b7b98812e4643a75bcebc9728c7f615d67ca739761b3e
 DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
-DIST tensorflow-1.15.0.tar.gz 46183168 BLAKE2B 5ccd87d5b53d6c543728e9c7ef96f12149bd8729620838b07a28f752fd5c57ef88350c2b62e8c44ec3561469eda38d8a64cbfca5e1ac7850674e1dd2c4d605d6 SHA512 f8683a950be6a3fa234d42e0f46bd9e049280e0c1d203c5c00ad44cf4728d894f2c301939d6d71203f815130a1ead53127adcc7565009589ff5aafc878b4dba7
-DIST tensorflow-2.0.0.tar.gz 46176997 BLAKE2B 761e91a774f61b50459240a83fe52f723a76c2cc3eaf7c371684849cb7365b94fb3743ab8de9adffe64bef0f70da2bb0b7e6426a3f402e436eff841d3edefe8d SHA512 c3a2cb9673d2ade8e83961f7d944165123d0e4d94e6ca4d8fdee4f02f536893b879bbbf78d3ad5e557467b58f97f7194f2d9cfc64d7d05540969be732070eecc
 DIST tensorflow-2.1.0.tar.gz 41637803 BLAKE2B 63aac95cdb71097d2e7a257a691a2b51703fb22d2535395494adebfae0d3f97f8dd9e0deaadf1c2f3d3d26138066a7c6fb9b9728b5b0ac3d54360d7b5e4bebff SHA512 6d09ba3c097e4b3d712e7164d649841572c418d56c207eea6d31e3b1851b84ce57e82801d9887d84cfd8fcf990a3ea4c4b4b05265f8d625509986412c230c3c6
 DIST tensorflow-2.2.0.tar.gz 42100791 BLAKE2B 8b773ae9945ca267af33a34d86f6aef02310976bea1cdd98858d1405e8da845640b72a073b71fcfccb2b73f08600a5eada2c2332a8d9480bba71e9f25dbb2220 SHA512 94a2663497d333d543f363e9fea94fbcfdcdbbc0dfbaf009ba9181a808713aeb78f9b8805f56bea5fd3925a36f105427b3996795750589f378d8afbdadc2b86d
 DIST tensorflow-patches-2.1.0.tar.bz2 5737 BLAKE2B 67f5004312389ed19e810aad9f2d5d4839d806b5a7422963520e0dff06b91346683647db757ac463af13ca70861a7ed9c94be242910abacc6be350e162947225 SHA512 a20bfb619d59e0d2741a27ce36e3d38dbb7a5a706d6bc56f8aa94b5b72288139ad301fc06390b128ef1c3a88b758d09d0224351590dedcda9c5309cc55af6fed

diff --git a/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
deleted file mode 100644
index 08c7458f770..00000000000
--- a/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 9a44d8e10d726cae992e611575b9dcb1627beede Mon Sep 17 00:00:00 2001
-From: Jason Zaman <jason@perfinion.com>
-Date: Wed, 11 Sep 2019 12:08:34 +0800
-Subject: [PATCH 1/2] WORKSPACE: add rules-docker http_archive,
- bazel-toolchains uses git_repo
-
-git_repository() rules cannot pull from --distdir and fail when building
-without internet access. Use http_archive instead and pin the sha256
-hash as well.
-
-Signed-off-by: Jason Zaman <jason@perfinion.com>
----
- WORKSPACE | 9 +++++++++
- 1 file changed, 9 insertions(+)
-
-diff --git a/WORKSPACE b/WORKSPACE
-index 74ea14d0fd..ccff2ba30f 100644
---- a/WORKSPACE
-+++ b/WORKSPACE
-@@ -12,6 +12,15 @@ http_archive(
-     ],
- )
- 
-+http_archive(
-+    name = "io_bazel_rules_docker",
-+    sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
-+    strip_prefix = "rules_docker-0.10.0",
-+    urls = [
-+        "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
-+    ],
-+)
-+
- # Load tf_repositories() before loading dependencies for other repository so
- # that dependencies like com_google_protobuf won't be overridden.
- load("//tensorflow:workspace.bzl", "tf_repositories")
--- 
-2.21.0
-

diff --git a/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0002-systemlibs-unbundle-functools32.patch b/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0002-systemlibs-unbundle-functools32.patch
deleted file mode 100644
index f8b72daddfc..00000000000
--- a/sci-libs/tensorflow/files/tensorflow-1.15.0_rc0-0002-systemlibs-unbundle-functools32.patch
+++ /dev/null
@@ -1,61 +0,0 @@
-From 05f57f785aeb58146b3873d4a0c614d897c56293 Mon Sep 17 00:00:00 2001
-From: Jason Zaman <jason@perfinion.com>
-Date: Wed, 11 Sep 2019 18:18:42 +0800
-Subject: [PATCH 2/2] systemlibs: unbundle functools32
-
-Signed-off-by: Jason Zaman <jason@perfinion.com>
----
- tensorflow/workspace.bzl                     |  1 +
- third_party/systemlibs/functools32.BUILD     | 15 +++++++++++++++
- third_party/systemlibs/syslibs_configure.bzl |  1 +
- 3 files changed, 17 insertions(+)
- create mode 100644 third_party/systemlibs/functools32.BUILD
-
-diff --git a/tensorflow/workspace.bzl b/tensorflow/workspace.bzl
-index d9fadc1030..760b7cd905 100755
---- a/tensorflow/workspace.bzl
-+++ b/tensorflow/workspace.bzl
-@@ -325,6 +325,7 @@ def tf_repositories(path_prefix = "", tf_repo_name = ""):
-         build_file = clean_dep("//third_party:functools32.BUILD"),
-         sha256 = "f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d",
-         strip_prefix = "functools32-3.2.3-2",
-+        system_build_file = clean_dep("//third_party/systemlibs:functools32.BUILD"),
-         urls = [
-             "https://storage.googleapis.com/mirror.tensorflow.org/pypi.python.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz",
-             "https://pypi.python.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz",
-diff --git a/third_party/systemlibs/functools32.BUILD b/third_party/systemlibs/functools32.BUILD
-new file mode 100644
-index 0000000000..5567ef6943
---- /dev/null
-+++ b/third_party/systemlibs/functools32.BUILD
-@@ -0,0 +1,15 @@
-+# Description:
-+#   functools32 provides a backport of the functools module for Python 2.
-+
-+licenses(["notice"])  # Python 2.0
-+
-+package(default_visibility = ["//visibility:public"])
-+
-+filegroup(
-+    name = "LICENSE",
-+)
-+
-+py_library(
-+    name = "functools32",
-+    srcs_version = "PY2AND3",
-+)
-diff --git a/third_party/systemlibs/syslibs_configure.bzl b/third_party/systemlibs/syslibs_configure.bzl
-index f83c0dd3d5..0884be8416 100644
---- a/third_party/systemlibs/syslibs_configure.bzl
-+++ b/third_party/systemlibs/syslibs_configure.bzl
-@@ -22,6 +22,7 @@ VALID_LIBS = [
-     "double_conversion",
-     "enum34_archive",
-     "flatbuffers",
-+    "functools32_archive",
-     "gast_archive",
-     "gif_archive",
-     "grpc",
--- 
-2.21.0
-

diff --git a/sci-libs/tensorflow/files/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch b/sci-libs/tensorflow/files/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch
deleted file mode 100644
index 4cc2f608d5d..00000000000
--- a/sci-libs/tensorflow/files/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/third_party/nccl/build_defs.bzl.tpl b/third_party/nccl/build_defs.bzl.tpl
-index 5719139855..5f5c3a1008 100644
---- a/third_party/nccl/build_defs.bzl.tpl
-+++ b/third_party/nccl/build_defs.bzl.tpl
-@@ -113,7 +113,6 @@ def _device_link_impl(ctx):
-             "--cmdline=--compile-only",
-             "--link",
-             "--compress-all",
--            "--bin2c-path=%s" % bin2c.dirname,
-             "--create=%s" % tmp_fatbin.path,
-             "--embedded-fatbin=%s" % fatbin_h.path,
-         ] + images,

diff --git a/sci-libs/tensorflow/tensorflow-1.15.0.ebuild b/sci-libs/tensorflow/tensorflow-1.15.0.ebuild
deleted file mode 100644
index 8190ff5d464..00000000000
--- a/sci-libs/tensorflow/tensorflow-1.15.0.ebuild
+++ /dev/null
@@ -1,360 +0,0 @@
-# Copyright 1999-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-
-DISTUTILS_OPTIONAL=1
-PYTHON_COMPAT=( python{3_6,3_7} )
-MY_PV=${PV/_rc/-rc}
-MY_P=${PN}-${MY_PV}
-
-inherit bazel check-reqs cuda distutils-r1 flag-o-matic toolchain-funcs
-
-DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
-HOMEPAGE="https://www.tensorflow.org/"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-IUSE="cuda mpi +python xla"
-CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
-for i in $CPU_USE_FLAGS_X86; do
-	IUSE+=" cpu_flags_x86_$i"
-done
-
-# distfiles that bazel uses for the workspace, will be copied to bazel-distdir
-bazel_external_uris="
-	http://www.kurims.kyoto-u.ac.jp/~ooura/fft2d.tgz -> oourafft2d-20061228.tgz
-	https://bitbucket.org/eigen/eigen/get/49177915a14a.tar.gz -> eigen-49177915a14a.tar.gz
-	https://github.com/abseil/abseil-cpp/archive/43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz -> abseil-cpp-43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz
-	https://github.com/bazelbuild/bazel-skylib/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz
-	https://github.com/bazelbuild/bazel-toolchains/archive/92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz -> bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz
-	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
-	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
-	https://github.com/bazelbuild/rules_swift/releases/download/0.11.1/rules_swift.0.11.1.tar.gz -> bazelbuild-rules_swift.0.11.1.tar.gz
-	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
-	https://github.com/google/gemmlowp/archive/12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip -> gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip
-	https://github.com/google/highwayhash/archive/fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz -> highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz
-	https://github.com/mborgerding/kissfft/archive/36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz -> kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz
-	https://github.com/nlopezgi/bazel-toolchains/archive/94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz -> bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz
-	https://github.com/pybind/pybind11/archive/v2.3.0.tar.gz -> pybind11-v2.3.0.tar.gz
-	https://github.com/llvm-mirror/llvm/archive/7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz -> llvm-7a7e03f906aada0cf4b749b51213fe5784eeff84.tar.gz
-	cuda? (
-		https://github.com/nvidia/nccl/archive/0ceaec9cee96ae7658aa45686853286651f36384.tar.gz -> nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz
-		https://github.com/NVlabs/cub/archive/1.8.0.zip -> cub-1.8.0.zip
-	)
-	python? (
-		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz
-		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
-		https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
-	)"
-
-SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
-		${bazel_external_uris}"
-
-RDEPEND="
-	app-arch/snappy
-	dev-db/lmdb
-	dev-db/sqlite
-	dev-libs/double-conversion
-	dev-libs/icu
-	~dev-libs/jsoncpp-1.9.1
-	dev-libs/libpcre
-	dev-libs/nsync
-	dev-libs/openssl:0=
-	>=dev-libs/protobuf-3.6.1:=
-	>=dev-libs/re2-0.2018.04.01
-	media-libs/giflib
-	media-libs/libjpeg-turbo
-	media-libs/libpng:0
-	>=net-libs/grpc-1.22.0
-	net-misc/curl
-	sys-libs/zlib
-	>=sys-apps/hwloc-2
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-		dev-libs/cudnn
-	)
-	mpi? ( virtual/mpi )
-	python? (
-		${PYTHON_DEPS}
-		>=dev-libs/flatbuffers-1.8.0
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
-		dev-python/gast[${PYTHON_USEDEP}]
-		>=dev-python/numpy-1.16[${PYTHON_USEDEP}]
-		dev-python/google-pasta[${PYTHON_USEDEP}]
-		dev-python/opt-einsum[${PYTHON_USEDEP}]
-		>=dev-python/protobuf-python-3.6.1[${PYTHON_USEDEP}]
-		dev-python/six[${PYTHON_USEDEP}]
-		dev-python/termcolor[${PYTHON_USEDEP}]
-		>=dev-python/grpcio-1.22.0[${PYTHON_USEDEP}]
-		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
-		>=net-libs/google-cloud-cpp-0.10.0
-		>=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
-		>=sci-libs/keras-preprocessing-1.0.5[${PYTHON_USEDEP}]
-		=sci-visualization/tensorboard-1*[${PYTHON_USEDEP}]
-	)"
-DEPEND="${RDEPEND}
-	dev-python/mock"
-PDEPEND="python? (
-		=sci-libs/tensorflow-estimator-1*[${PYTHON_USEDEP}]
-	)"
-BDEPEND="
-	app-arch/unzip
-	>=dev-libs/protobuf-3.6.0
-	dev-java/java-config
-	dev-python/mock
-	dev-lang/swig
-	dev-python/cython
-	|| (
-		=dev-util/bazel-0.24*
-		=dev-util/bazel-0.27*
-	)
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-	)
-	!python? ( dev-lang/python )
-	python? (
-		>=dev-python/grpcio-tools-1.22.0
-	)"
-REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
-
-S="${WORKDIR}/${MY_P}"
-
-PATCHES=(
-	"${FILESDIR}/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
-	"${FILESDIR}/tensorflow-1.15.0_rc0-0002-systemlibs-unbundle-functools32.patch"
-)
-DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
-CHECKREQS_MEMORY="5G"
-CHECKREQS_DISK_BUILD="5G"
-
-get-cpu-flags() {
-	local i f=()
-	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
-	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
-		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
-	done
-	use cpu_flags_x86_fma3 && f+=( -mfma )
-	echo "${f[*]}"
-}
-
-pkg_setup() {
-	local num_pythons_enabled
-	num_pythons_enabled=0
-	count_impls(){
-		num_pythons_enabled=$((${num_pythons_enabled} + 1))
-	}
-	use python && python_foreach_impl count_impls
-
-	# 5 G to build C/C++ libs, 5G per python impl
-	CHECKREQS_DISK_BUILD="$((5 + 5 * $num_pythons_enabled))G"
-	check-reqs_pkg_setup
-}
-
-src_unpack() {
-	# Only unpack the main distfile
-	unpack "${P}.tar.gz"
-	bazel_load_distfiles "${bazel_external_uris}"
-}
-
-src_prepare() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	append-flags $(get-cpu-flags)
-	bazel_setup_bazelrc
-
-	if ver_test "$(cuda_toolkit_version)" -ge "10.2"; then
-		eapply "${FILESDIR}/tensorflow-2.1.0-cuda_10.2_support_bin2c.patch"
-	fi
-
-	default
-	use python && python_copy_sources
-
-	use cuda && cuda_add_sandbox
-}
-
-src_configure() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_configure() {
-		export CC_OPT_FLAGS=" "
-		export TF_ENABLE_XLA=$(usex xla 1 0)
-		export TF_NEED_OPENCL_SYCL=0
-		export TF_NEED_OPENCL=0
-		export TF_NEED_COMPUTECPP=0
-		export TF_NEED_ROCM=0
-		export TF_NEED_MPI=$(usex mpi 1 0)
-		export TF_SET_ANDROID_WORKSPACE=0
-
-		if use python; then
-			export PYTHON_BIN_PATH="${PYTHON}"
-			export PYTHON_LIB_PATH="$(python_get_sitedir)"
-		else
-			export PYTHON_BIN_PATH="$(which python)"
-			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
-		fi
-
-		export TF_NEED_CUDA=$(usex cuda 1 0)
-		export TF_DOWNLOAD_CLANG=0
-		export TF_CUDA_CLANG=0
-		export TF_NEED_TENSORRT=0
-		if use cuda; then
-			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
-			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
-			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
-			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
-			einfo "Setting CUDA version: $TF_CUDA_VERSION"
-			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
-
-			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
-				ewarn "WARNING: Tensorflow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
-				ewarn "These may not be optimal for your GPU."
-				ewarn ""
-				ewarn "To configure Tensorflow with the CUDA compute capability that is optimal for your GPU,"
-				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
-				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
-				ewarn ""
-				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
-				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
-			fi
-		fi
-
-		local SYSLIBS=(
-			absl_py
-			astor_archive
-			boringssl
-			com_github_googleapis_googleapis
-			com_github_googlecloudplatform_google_cloud_cpp
-			com_google_protobuf
-			com_googlesource_code_re2
-			curl
-			cython
-			double_conversion
-			enum34_archive
-			flatbuffers
-			functools32_archive
-			gast_archive
-			gif_archive
-			grpc
-			hwloc
-			icu
-			jpeg
-			jsoncpp_git
-			keras_applications_archive
-			lmdb
-			nasm
-			nsync
-			opt_einsum_archive
-			org_sqlite
-			pasta
-			pcre
-			png_archive
-			six_archive
-			snappy
-			swig
-			termcolor_archive
-			wrapt
-			zlib_archive
-		)
-
-		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
-		export TF_IGNORE_MAX_BAZEL_VERSION=1
-
-		# This is not autoconf
-		./configure || die
-
-		echo 'build --config=noaws --config=nohdfs --config=noignite --config=nokafka' >> .bazelrc || die
-		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
-		echo 'build --incompatible_no_support_tools_in_action_inputs=false' >> .bazelrc || die
-	}
-	if use python; then
-		python_foreach_impl run_in_build_dir do_configure
-	else
-		do_configure
-	fi
-}
-
-src_compile() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	if use python; then
-		python_setup
-		BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}"
-	fi
-
-	# fail early if any deps are missing
-	ebazel build --nobuild \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so \
-		//tensorflow:libtensorflow_cc.so \
-		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
-
-	ebazel build \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so
-	ebazel build //tensorflow:libtensorflow_cc.so
-
-	do_compile() {
-		ebazel build //tensorflow/tools/pip_package:build_pip_package
-	}
-	BUILD_DIR="${S}"
-	cd "${BUILD_DIR}"
-	use python && python_foreach_impl run_in_build_dir do_compile
-	ebazel shutdown
-}
-
-src_install() {
-	local i j
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_install() {
-		einfo "Installing ${EPYTHON} files"
-		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
-		mkdir -p "${srcdir}" || die
-		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
-		cd "${srcdir}" || die
-		esetup.py install
-
-		# libtensorflow_framework.so is in /usr/lib already
-		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
-		rm -f "${D}/$(python_get_sitedir)"/${PN}_core/lib${PN}_framework.so* || die
-		python_optimize
-	}
-
-	if use python; then
-		python_foreach_impl run_in_build_dir do_install
-
-		# Symlink to python-exec scripts
-		for i in "${ED}"/usr/lib/python-exec/*/*; do
-			n="${i##*/}"
-			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
-		done
-
-		python_setup
-		local BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}" || die
-	fi
-
-	einfo "Installing headers"
-	ebazel build //tensorflow:install_headers
-	ebazel shutdown
-	insinto /usr/include/${PN}/
-	doins -r bazel-genfiles/tensorflow/include/*
-
-	einfo "Installing libs"
-	# Generate pkg-config file
-	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
-	insinto /usr/$(get_libdir)/pkgconfig
-	doins ${PN}.pc ${PN}_cc.pc
-
-	for l in libtensorflow{,_framework,_cc}.so; do
-		dolib.so bazel-bin/tensorflow/${l}
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
-	done
-
-	einstalldocs
-}

diff --git a/sci-libs/tensorflow/tensorflow-2.0.0.ebuild b/sci-libs/tensorflow/tensorflow-2.0.0.ebuild
deleted file mode 100644
index 4e04d4e51c6..00000000000
--- a/sci-libs/tensorflow/tensorflow-2.0.0.ebuild
+++ /dev/null
@@ -1,359 +0,0 @@
-# Copyright 1999-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-
-DISTUTILS_OPTIONAL=1
-PYTHON_COMPAT=( python{3_6,3_7} )
-MY_PV=${PV/_rc/-rc}
-MY_P=${PN}-${MY_PV}
-
-inherit bazel check-reqs cuda distutils-r1 flag-o-matic toolchain-funcs
-
-DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
-HOMEPAGE="https://www.tensorflow.org/"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-IUSE="cuda mpi +python xla"
-CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
-for i in $CPU_USE_FLAGS_X86; do
-	IUSE+=" cpu_flags_x86_$i"
-done
-
-# distfiles that bazel uses for the workspace, will be copied to bazel-distdir
-bazel_external_uris="
-	https://storage.googleapis.com/mirror.tensorflow.org/www.kurims.kyoto-u.ac.jp/~ooura/fft2d.tgz -> oourafft2d-20061228.tgz
-	https://bitbucket.org/eigen/eigen/get/049af2f56331.tar.gz -> eigen-049af2f56331.tar.gz
-	https://github.com/abseil/abseil-cpp/archive/43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz -> abseil-cpp-43ef2148c0936ebf7cb4be6b19927a9d9d145b8f.tar.gz
-	https://github.com/bazelbuild/bazel-skylib/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz
-	https://github.com/bazelbuild/bazel-toolchains/archive/92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz -> bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz
-	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
-	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
-	https://github.com/bazelbuild/rules_swift/releases/download/0.11.1/rules_swift.0.11.1.tar.gz -> bazelbuild-rules_swift.0.11.1.tar.gz
-	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
-	https://github.com/google/gemmlowp/archive/12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip -> gemmlowp-12fed0cd7cfcd9e169bf1925bc3a7a58725fdcc3.zip
-	https://github.com/google/highwayhash/archive/fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz -> highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz
-	https://github.com/mborgerding/kissfft/archive/36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz -> kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz
-	https://github.com/nlopezgi/bazel-toolchains/archive/94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz -> bazel-toolchains-94d31935a2c94fe7e7c7379a0f3393e181928ff7.tar.gz
-	https://github.com/pybind/pybind11/archive/v2.3.0.tar.gz -> pybind11-v2.3.0.tar.gz
-	https://github.com/llvm-mirror/llvm/archive/b7d166cebcf619a3691eed3f994384aab3d80fa6.tar.gz -> llvm-b7d166cebcf619a3691eed3f994384aab3d80fa6.tar.gz
-	cuda? (
-		https://github.com/nvidia/nccl/archive/0ceaec9cee96ae7658aa45686853286651f36384.tar.gz -> nvidia-nccl-0ceaec9cee96ae7658aa45686853286651f36384.tar.gz
-		https://github.com/NVlabs/cub/archive/1.8.0.zip -> cub-1.8.0.zip
-	)
-	python? (
-		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz
-		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
-		https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
-	)"
-
-SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
-		${bazel_external_uris}"
-
-RDEPEND="
-	app-arch/snappy
-	dev-db/lmdb
-	dev-db/sqlite
-	dev-libs/double-conversion
-	dev-libs/icu
-	~dev-libs/jsoncpp-1.9.1
-	dev-libs/libpcre
-	dev-libs/nsync
-	dev-libs/openssl:0=
-	>=dev-libs/protobuf-3.6.1:=
-	>=dev-libs/re2-0.2018.04.01
-	media-libs/giflib
-	media-libs/libjpeg-turbo
-	media-libs/libpng:0
-	>=net-libs/grpc-1.22.0
-	net-misc/curl
-	sys-libs/zlib
-	>=sys-apps/hwloc-2
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-		dev-libs/cudnn
-	)
-	mpi? ( virtual/mpi )
-	python? (
-		${PYTHON_DEPS}
-		>=dev-libs/flatbuffers-1.8.0
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
-		dev-python/gast[${PYTHON_USEDEP}]
-		>=dev-python/numpy-1.16[${PYTHON_USEDEP}]
-		dev-python/google-pasta[${PYTHON_USEDEP}]
-		dev-python/opt-einsum[${PYTHON_USEDEP}]
-		>=dev-python/protobuf-python-3.6.1[${PYTHON_USEDEP}]
-		dev-python/six[${PYTHON_USEDEP}]
-		dev-python/termcolor[${PYTHON_USEDEP}]
-		>=dev-python/grpcio-1.22.0[${PYTHON_USEDEP}]
-		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
-		>=net-libs/google-cloud-cpp-0.10.0
-		>=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
-		>=sci-libs/keras-preprocessing-1.0.5[${PYTHON_USEDEP}]
-		>=sci-visualization/tensorboard-2.0.0[${PYTHON_USEDEP}]
-	)"
-DEPEND="${RDEPEND}
-	dev-python/mock"
-PDEPEND="python? (
-		>=sci-libs/tensorflow-estimator-2.0.0[${PYTHON_USEDEP}]
-	)"
-BDEPEND="
-	app-arch/unzip
-	>=dev-libs/protobuf-3.6.0
-	dev-java/java-config
-	dev-python/mock
-	dev-lang/swig
-	dev-python/cython
-	|| (
-		=dev-util/bazel-0.24*
-		=dev-util/bazel-0.27*
-	)
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-	)
-	!python? ( dev-lang/python )
-	python? (
-		>=dev-python/grpcio-tools-1.22.0
-	)"
-REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
-
-S="${WORKDIR}/${MY_P}"
-
-PATCHES=(
-	"${FILESDIR}/tensorflow-1.15.0_rc0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
-)
-DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
-CHECKREQS_MEMORY="5G"
-CHECKREQS_DISK_BUILD="5G"
-
-get-cpu-flags() {
-	local i f=()
-	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
-	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
-		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
-	done
-	use cpu_flags_x86_fma3 && f+=( -mfma )
-	echo "${f[*]}"
-}
-
-pkg_setup() {
-	ewarn "TensorFlow 2.0 is a major release that contains some incompatibilities"
-	ewarn "with TensorFlow 1.x. For more information about migrating to TF2.0 see:"
-	ewarn "https://www.tensorflow.org/guide/migrate"
-
-	local num_pythons_enabled
-	num_pythons_enabled=0
-	count_impls(){
-		num_pythons_enabled=$((${num_pythons_enabled} + 1))
-	}
-	use python && python_foreach_impl count_impls
-
-	# 5G to build C/C++ libs, 5G per python impl
-	CHECKREQS_DISK_BUILD="$((5 + 5 * ${num_pythons_enabled}))G"
-	check-reqs_pkg_setup
-}
-
-src_unpack() {
-	# Only unpack the main distfile
-	unpack "${P}.tar.gz"
-	bazel_load_distfiles "${bazel_external_uris}"
-}
-
-src_prepare() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	append-flags $(get-cpu-flags)
-	bazel_setup_bazelrc
-
-	default
-	use python && python_copy_sources
-
-	use cuda && cuda_add_sandbox
-}
-
-src_configure() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_configure() {
-		export CC_OPT_FLAGS=" "
-		export TF_ENABLE_XLA=$(usex xla 1 0)
-		export TF_NEED_OPENCL_SYCL=0
-		export TF_NEED_OPENCL=0
-		export TF_NEED_COMPUTECPP=0
-		export TF_NEED_ROCM=0
-		export TF_NEED_MPI=$(usex mpi 1 0)
-		export TF_SET_ANDROID_WORKSPACE=0
-
-		if use python; then
-			export PYTHON_BIN_PATH="${PYTHON}"
-			export PYTHON_LIB_PATH="$(python_get_sitedir)"
-		else
-			export PYTHON_BIN_PATH="$(which python)"
-			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
-		fi
-
-		export TF_NEED_CUDA=$(usex cuda 1 0)
-		export TF_DOWNLOAD_CLANG=0
-		export TF_CUDA_CLANG=0
-		export TF_NEED_TENSORRT=0
-		if use cuda; then
-			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
-			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
-			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
-			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
-			einfo "Setting CUDA version: $TF_CUDA_VERSION"
-			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
-
-			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
-				ewarn "WARNING: Tensorflow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
-				ewarn "These may not be optimal for your GPU."
-				ewarn ""
-				ewarn "To configure Tensorflow with the CUDA compute capability that is optimal for your GPU,"
-				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
-				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
-				ewarn ""
-				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
-				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
-			fi
-		fi
-
-		local SYSLIBS=(
-			absl_py
-			astor_archive
-			boringssl
-			com_github_googleapis_googleapis
-			com_github_googlecloudplatform_google_cloud_cpp
-			com_google_protobuf
-			com_googlesource_code_re2
-			curl
-			cython
-			double_conversion
-			enum34_archive
-			flatbuffers
-			functools32_archive
-			gast_archive
-			gif_archive
-			grpc
-			hwloc
-			icu
-			jpeg
-			jsoncpp_git
-			keras_applications_archive
-			lmdb
-			nasm
-			nsync
-			opt_einsum_archive
-			org_sqlite
-			pasta
-			pcre
-			png_archive
-			six_archive
-			snappy
-			swig
-			termcolor_archive
-			wrapt
-			zlib_archive
-		)
-
-		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
-		export TF_IGNORE_MAX_BAZEL_VERSION=1
-
-		# This is not autoconf
-		./configure || die
-
-		echo 'build --config=noaws --config=nohdfs --config=noignite --config=nokafka' >> .bazelrc || die
-		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
-		echo 'build --incompatible_no_support_tools_in_action_inputs=false' >> .bazelrc || die
-	}
-	if use python; then
-		python_foreach_impl run_in_build_dir do_configure
-	else
-		do_configure
-	fi
-}
-
-src_compile() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	if use python; then
-		python_setup
-		BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}"
-	fi
-
-	# fail early if any deps are missing
-	ebazel build --nobuild \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so \
-		//tensorflow:libtensorflow_cc.so \
-		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
-
-	ebazel build \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so
-	ebazel build //tensorflow:libtensorflow_cc.so
-
-	do_compile() {
-		ebazel build //tensorflow/tools/pip_package:build_pip_package
-	}
-	BUILD_DIR="${S}"
-	cd "${BUILD_DIR}"
-	use python && python_foreach_impl run_in_build_dir do_compile
-	ebazel shutdown
-}
-
-src_install() {
-	local i j
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	do_install() {
-		einfo "Installing ${EPYTHON} files"
-		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
-		mkdir -p "${srcdir}" || die
-		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
-		cd "${srcdir}" || die
-		esetup.py install
-
-		# libtensorflow_framework.so is in /usr/lib already
-		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
-		rm -f "${D}/$(python_get_sitedir)"/${PN}_core/lib${PN}_framework.so* || die
-		python_optimize
-	}
-
-	if use python; then
-		python_foreach_impl run_in_build_dir do_install
-
-		# Symlink to python-exec scripts
-		for i in "${ED}"/usr/lib/python-exec/*/*; do
-			n="${i##*/}"
-			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
-		done
-
-		python_setup
-		local BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}" || die
-	fi
-
-	einfo "Installing headers"
-	ebazel build //tensorflow:install_headers
-	ebazel shutdown
-	insinto /usr/include/${PN}/
-	doins -r bazel-genfiles/tensorflow/include/*
-
-	einfo "Installing libs"
-	# Generate pkg-config file
-	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
-	insinto /usr/$(get_libdir)/pkgconfig
-	doins ${PN}.pc ${PN}_cc.pc
-
-	for l in libtensorflow{,_framework,_cc}.so; do
-		dolib.so bazel-bin/tensorflow/${l}
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
-	done
-
-	einstalldocs
-}



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2021-10-25  1:11 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2021-10-25  1:11 UTC (permalink / raw
  To: gentoo-commits

commit:     3ee1f4fa9a7bae90ab9452aa9570775cc7c15f00
Author:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
AuthorDate: Sun Oct 24 21:34:06 2021 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Mon Oct 25 01:08:44 2021 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=3ee1f4fa

sci-libs/tensorflow: Fix build with >=CUDA-11.3

Closes: https://bugs.gentoo.org/815244
Package-Manager: Portage-3.0.20, Repoman-3.0.3
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 sci-libs/tensorflow/Manifest                       |   3 +-
 .../files/0008-patch-ruy-for-gcc-11.patch          |  37 --
 sci-libs/tensorflow/tensorflow-2.5.0-r1.ebuild     | 413 ---------------------
 ...-2.5.0-r2.ebuild => tensorflow-2.5.0-r3.ebuild} |   8 +-
 4 files changed, 5 insertions(+), 456 deletions(-)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 7fe3a6d8b67..41d64c1052c 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -31,6 +31,5 @@ DIST ruy-54774a7a2cf85963777289193629d4bd42de4a59.zip 363561 BLAKE2B 81ff61f1a12
 DIST sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz 2569603 BLAKE2B cd66dc8a0b4ad3ea586a79ef588c8d68e4f05b6ea89e6d8119b5ee338b9046c11a747ca57c2f80a3c90fab119c05e85f5965571c9e522ccb8a312b95804d1a36 SHA512 b819d050bb38246b503f1eb3d6e7d878d0f166f3602ae3c327749b1f6ddc2b1d6ac7f768b1f1d055b98b7b98812e4643a75bcebc9728c7f615d67ca739761b3e
 DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
 DIST tensorflow-2.5.0.tar.gz 55999506 BLAKE2B 0f1f29ed9ff6e5fd3fc0c98419bca636f48085810f934290885b3469c45817b2ce17e9dbd3d93d70b09ec7975088edd0973f147449f527a8f2e23b388e2592ce SHA512 637c63b1bed1c0eb7bb018f1ff7f29f7f0d78e75dac384df4ecb5dfb92bbcb28209e3d3d2204145abddf88e3247d8c31bbb4cea032a73b7122b2ef3eb0d2b947
-DIST tensorflow-patches-2.5.0-r1.tar.bz2 5180 BLAKE2B aafebb5cc4616ad93982e0fd8299f91c22063b96841b920a98a4cb54bcdc0cad51011f943ca62055239a0c21ed69d1f5b985710dcf0c28346b7ba12114583842 SHA512 38acd53136f3f1e061b71624cc5a1cefbdcb1d897b7728bc93462e8f52e998c3d72453ca9c94fa53dd28279cae5c591b7cbe9f61684784ff94c451a641524f31
-DIST tensorflow-patches-2.5.0-r2.tar.bz2 5538 BLAKE2B df289d77a79bb2143ebe26685f3c8f2efe64819a2fd6bd9866bc05638d2c19e822ddb90df3f342f1fc27827b3799531ad4ed280f9cc5291e75b1c2042a61603f SHA512 4f5ecb4efb56e735ee79dd07e0e8737f535da4e9ba8f63f769d548813e5aa7017e03cd1bd0d661e1958fa2916df7c7f25a7c0d141dbfe9716586ca82c021169f
+DIST tensorflow-patches-2.5.0-r3.tar.bz2 6385 BLAKE2B 5cd3a9639884aa67bbd192cb1b06cfedc557b8ad05aca0f4887987c615656620b9294de98b119bf9d6edafd5b56b9d5e6900f95adb06a3a49cfdf342c6195b22 SHA512 8f0d2be9ffed81aba5c85088a79ca56af889e0247516f51657d214d19d39877cfe331e60f63e8c54e2b12acd7a624b181e281534acbd9a103f6eec0aae70fd8a
 DIST tensorflow-toolchains-v1.1.10.tar.gz 137512 BLAKE2B 774b22a599070564c0f77ed0da79f6a96359b196dd3007370be885b796c236b9a29c01597507a3049f82cf489824c8016740b778cacb2b8cd859500f9ecb1887 SHA512 1daf4684f5b364a89b3cc475cf2d13d0d0622fc14d2b32d7ad8518932538e716782d755cfcedc98d129a3986bc7c38bfc7854c8ad596891e397510b75ae2f93e

diff --git a/sci-libs/tensorflow/files/0008-patch-ruy-for-gcc-11.patch b/sci-libs/tensorflow/files/0008-patch-ruy-for-gcc-11.patch
deleted file mode 100644
index 3bb2cf2e1cb..00000000000
--- a/sci-libs/tensorflow/files/0008-patch-ruy-for-gcc-11.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-diff --git a/third_party/ruy/commit-3c93cda.patch b/third_party/ruy/commit-3c93cda.patch
-new file mode 100644
-index 00000000000..e530a842717
---- /dev/null
-+++ b/third_party/ruy/commit-3c93cda.patch
-@@ -0,0 +1,21 @@
-+From 3c93cda8211efa01128d48950f0d6ee5233c5b9b Mon Sep 17 00:00:00 2001
-+From: stha09 <51720730+stha09@users.noreply.github.com>
-+Date: Thu, 6 May 2021 18:31:30 +0200
-+Subject: [PATCH] IWYU: include limits for std::numeric_limits (#253)
-+
-+---
-+ ruy/block_map.cc | 1 +
-+ 1 file changed, 1 insertion(+)
-+
-+diff --git a/ruy/block_map.cc b/ruy/block_map.cc
-+index 6c01e52a02..e04e7aff4a 100644
-+--- a/ruy/block_map.cc
-++++ b/ruy/block_map.cc
-+@@ -17,6 +17,7 @@ limitations under the License.
-+ 
-+ #include <algorithm>
-+ #include <cstdint>
-++#include <limits>
-+ 
-+ #ifdef RUY_MAKEBLOCKMAP_DEBUG
-+ #include <cstdio>
-diff --git a/third_party/ruy/workspace.bzl b/third_party/ruy/workspace.bzl
-index 50769621770..7e91d640469 100644
---- a/third_party/ruy/workspace.bzl
-+++ b/third_party/ruy/workspace.bzl
-@@ -12,4 +12,5 @@ def repo():
-             "https://github.com/google/ruy/archive/54774a7a2cf85963777289193629d4bd42de4a59.zip",
-         ],
-         build_file = "//third_party/ruy:BUILD",
-+        patch_file = "//third_party/ruy:commit-3c93cda.patch"
-     )

diff --git a/sci-libs/tensorflow/tensorflow-2.5.0-r1.ebuild b/sci-libs/tensorflow/tensorflow-2.5.0-r1.ebuild
deleted file mode 100644
index 5f4f341f0cb..00000000000
--- a/sci-libs/tensorflow/tensorflow-2.5.0-r1.ebuild
+++ /dev/null
@@ -1,413 +0,0 @@
-# Copyright 1999-2021 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-
-DISTUTILS_OPTIONAL=1
-PYTHON_COMPAT=( python3_{7,8,9} )
-DISTUTILS_USE_SETUPTOOLS=rdepend
-MY_PV=${PV/_rc/-rc}
-MY_P=${PN}-${MY_PV}
-
-inherit bazel check-reqs cuda distutils-r1 flag-o-matic prefix toolchain-funcs
-
-DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
-HOMEPAGE="https://www.tensorflow.org/"
-
-LICENSE="Apache-2.0"
-SLOT="0"
-KEYWORDS="~amd64"
-IUSE="cuda mpi +python xla"
-CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
-for i in $CPU_USE_FLAGS_X86; do
-	IUSE+=" cpu_flags_x86_${i}"
-done
-
-# distfiles that bazel uses for the workspace, will be copied to bazel-distdir
-bazel_external_uris="
-	https://github.com/abseil/abseil-cpp/archive/6f9d96a1f41439ac172ee2ef7ccd8edf0e5d068c.tar.gz -> abseil-cpp-6f9d96a1f41439ac172ee2ef7ccd8edf0e5d068c.tar.gz
-	https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz
-	https://github.com/bazelbuild/bazel-toolchains/archive/92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz -> bazel-toolchains-92dd8a7a518a2fb7ba992d47c8b38299fe0be825.tar.gz
-	https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip
-	https://github.com/bazelbuild/rules_cc/archive/01d4a48911d5e7591ecb1c06d3b8af47fe872371.zip -> bazelbuild-rules_cc-01d4a48911d5e7591ecb1c06d3b8af47fe872371.zip
-	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
-	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
-	https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip
-	https://github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz -> bazelbuild-rules_proto-97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz
-	https://github.com/bazelbuild/rules_python/releases/download/0.0.1/rules_python-0.0.1.tar.gz -> bazelbuild-rules_python-0.0.1.tar.gz
-	https://github.com/bazelbuild/rules_swift/archive/3eeeb53cebda55b349d64c9fc144e18c5f7c0eb8.tar.gz -> bazelbuild-rules_swift-3eeeb53cebda55b349d64c9fc144e18c5f7c0eb8.tar.gz
-	https://github.com/dmlc/dlpack/archive/3efc489b55385936531a06ff83425b719387ec63.tar.gz -> dlpack-3efc489b55385936531a06ff83425b719387ec63.tar.gz
-	https://github.com/google/farmhash/archive/816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz -> farmhash-816a4ae622e964763ca0862d9dbd19324a1eaf45.tar.gz
-	https://github.com/google/gemmlowp/archive/fda83bdc38b118cc6b56753bd540caa49e570745.zip -> gemmlowp-fda83bdc38b118cc6b56753bd540caa49e570745.zip
-	https://github.com/google/highwayhash/archive/fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz -> highwayhash-fd3d9af80465e4383162e4a7c5e2f406e82dd968.tar.gz
-	https://github.com/google/re2/archive/506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz -> re2-506cfa4bffd060c06ec338ce50ea3468daa6c814.tar.gz
-	https://github.com/google/ruy/archive/54774a7a2cf85963777289193629d4bd42de4a59.zip -> ruy-54774a7a2cf85963777289193629d4bd42de4a59.zip
-	https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz
-	https://github.com/llvm/llvm-project/archive/1f6a57c1a0fad922e04a2b1f414b092d4b0cd8b0.tar.gz -> llvm-1f6a57c1a0fad922e04a2b1f414b092d4b0cd8b0.tar.gz
-	https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz
-	https://github.com/mborgerding/kissfft/archive/36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz -> kissfft-36dbc057604f00aacfc0288ddad57e3b21cfc1b8.tar.gz
-	https://github.com/oneapi-src/oneDNN/archive/v2.2.tar.gz -> oneDNN-v2.2.tar.gz
-	https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz
-	https://github.com/pytorch/cpuinfo/archive/5916273f79a21551890fd3d56fc5375a78d1598d.zip -> pytorch-cpuinfo-5916273f79a21551890fd3d56fc5375a78d1598d.zip
-	https://github.com/pytorch/cpuinfo/archive/d5e37adf1406cf899d7d9ec1d317c47506ccb970.tar.gz -> pytorch-cpuinfo-d5e37adf1406cf899d7d9ec1d317c47506ccb970.tar.gz
-	https://github.com/tensorflow/toolchains/archive/v1.1.10.tar.gz -> tensorflow-toolchains-v1.1.10.tar.gz
-	https://gitlab.com/libeigen/eigen/-/archive/f612df273689a19d25b45ca4f8269463207c4fee/eigen-f612df273689a19d25b45ca4f8269463207c4fee.tar.gz
-	cuda? (
-		https://github.com/NVIDIA/cudnn-frontend/archive/360d6e7164dfb7c802493fd1c0464f0d815b852a.zip -> cudnn-frontend-360d6e7164dfb7c802493fd1c0464f0d815b852a.zip
-		https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip
-		https://github.com/nvidia/nccl/archive/v2.8.3-1.tar.gz -> nvidia-nccl-v2.8.3-1.tar.gz
-	)
-	python? (
-		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz
-		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
-		https://pypi.python.org/packages/bc/cc/3cdb0a02e7e96f6c70bd971bc8a90b8463fda83e264fa9c5c1c98ceabd81/backports.weakref-1.0rc1.tar.gz
-	)"
-
-SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
-		https://dev.gentoo.org/~perfinion/patches/tensorflow-patches-${PVR}.tar.bz2
-		${bazel_external_uris}"
-
-RDEPEND="
-	app-arch/snappy
-	dev-db/lmdb
-	dev-db/sqlite
-	dev-libs/double-conversion
-	dev-libs/icu:=
-	>=dev-libs/jsoncpp-1.9.2
-	dev-libs/libpcre
-	dev-libs/nsync
-	dev-libs/openssl:0=
-	>=dev-libs/protobuf-3.13.0:=
-	>=dev-libs/re2-0.2019.06.01:=
-	media-libs/giflib
-	media-libs/libjpeg-turbo
-	media-libs/libpng:0
-	>=net-libs/grpc-1.28:=
-	net-misc/curl
-	sys-libs/zlib
-	>=sys-apps/hwloc-2
-	cuda? (
-		=dev-util/nvidia-cuda-toolkit-11*[profiler]
-		=dev-libs/cudnn-8*
-	)
-	mpi? ( virtual/mpi )
-	python? (
-		${PYTHON_DEPS}
-		>=dev-libs/flatbuffers-1.12.0:=
-		dev-python/absl-py[${PYTHON_USEDEP}]
-		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
-		dev-python/astunparse[${PYTHON_USEDEP}]
-		dev-python/dill[${PYTHON_USEDEP}]
-		dev-python/flatbuffers[${PYTHON_USEDEP}]
-		>=dev-python/gast-0.3.3[${PYTHON_USEDEP}]
-		dev-python/h5py[${PYTHON_USEDEP}]
-		>=dev-python/numpy-1.19[${PYTHON_USEDEP}]
-		>=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}]
-		>=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}]
-		>=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
-		dev-python/pybind11[${PYTHON_USEDEP}]
-		dev-python/six[${PYTHON_USEDEP}]
-		dev-python/tblib[${PYTHON_USEDEP}]
-		dev-python/termcolor[${PYTHON_USEDEP}]
-		dev-python/typing-extensions[${PYTHON_USEDEP}]
-		>=dev-python/grpcio-1.28[${PYTHON_USEDEP}]
-		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
-		>=net-libs/google-cloud-cpp-0.10.0
-		>=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
-		>=sci-libs/keras-preprocessing-1.1.2[${PYTHON_USEDEP}]
-		>=sci-visualization/tensorboard-2.5.0[${PYTHON_USEDEP}]
-	)"
-DEPEND="${RDEPEND}
-	python? (
-		dev-python/mock
-		dev-python/setuptools
-	)"
-PDEPEND="python? (
-		>=sci-libs/tensorflow-estimator-2.5.0[${PYTHON_USEDEP}]
-	)"
-BDEPEND="
-	app-arch/unzip
-	>=dev-libs/protobuf-3.8.0
-	dev-java/java-config
-	>=dev-util/bazel-3.7.2
-	cuda? (
-		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
-	)
-	!python? ( dev-lang/python )
-	python? (
-		dev-python/cython
-		dev-python/mock
-		>=dev-python/grpcio-tools-1.28
-	)"
-REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
-
-S="${WORKDIR}/${MY_P}"
-
-PATCHES=(
-	"${FILESDIR}/0008-patch-ruy-for-gcc-11.patch"
-)
-DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
-CHECKREQS_MEMORY="5G"
-CHECKREQS_DISK_BUILD="10G"
-
-get-cpu-flags() {
-	local i f=()
-	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
-	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
-		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
-	done
-	use cpu_flags_x86_fma3 && f+=( -mfma )
-	echo "${f[*]}"
-}
-
-pkg_setup() {
-	ewarn "TensorFlow 2.0 is a major release that contains some incompatibilities"
-	ewarn "with TensorFlow 1.x. For more information about migrating to TF2.0 see:"
-	ewarn "https://www.tensorflow.org/guide/migrate"
-
-	local num_pythons_enabled
-	num_pythons_enabled=0
-	count_impls() {
-		num_pythons_enabled=$((${num_pythons_enabled} + 1))
-	}
-	use python && python_foreach_impl count_impls
-
-	# 10G to build C/C++ libs, 5G per python impl
-	CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
-	check-reqs_pkg_setup
-}
-
-src_unpack() {
-	# Only unpack the main distfile
-	unpack "${P}.tar.gz"
-	unpack tensorflow-patches-${PVR}.tar.bz2
-	bazel_load_distfiles "${bazel_external_uris}"
-}
-
-src_prepare() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-
-	append-flags $(get-cpu-flags)
-	append-cxxflags -std=c++14 # bug 787938
-	filter-flags '-fvtable-verify=@(std|preinit)'
-	bazel_setup_bazelrc
-
-	eapply "${WORKDIR}"/patches/*.patch
-
-	# Relax version checks in setup.py
-	sed -i "/^    '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
-	sed -i "/config_googleapis/d" tensorflow/workspace0.bzl || die
-
-	# Prefixify hard-coded command locations
-	hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl
-
-	default
-	use python && python_copy_sources
-
-	use cuda && cuda_add_sandbox
-}
-
-src_configure() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
-
-	do_configure() {
-		export CC_OPT_FLAGS=" "
-		export TF_ENABLE_XLA=$(usex xla 1 0)
-		export TF_NEED_OPENCL_SYCL=0
-		export TF_NEED_OPENCL=0
-		export TF_NEED_COMPUTECPP=0
-		export TF_NEED_ROCM=0
-		export TF_NEED_MPI=$(usex mpi 1 0)
-		export TF_SET_ANDROID_WORKSPACE=0
-
-		if use python; then
-			export PYTHON_BIN_PATH="${PYTHON}"
-			export PYTHON_LIB_PATH="$(python_get_sitedir)"
-		else
-			export PYTHON_BIN_PATH="$(which python)"
-			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
-		fi
-
-		export TF_NEED_CUDA=$(usex cuda 1 0)
-		export TF_DOWNLOAD_CLANG=0
-		export TF_CUDA_CLANG=0
-		export TF_NEED_TENSORRT=0
-		if use cuda; then
-			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
-			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
-			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
-			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
-			einfo "Setting CUDA version: $TF_CUDA_VERSION"
-			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
-
-			if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then
-				ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"
-				ewarn "version is not supported by the currently installed CUDA. TensorFlow will"
-				ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."
-				ewarn "If the build fails with linker errors try rebuilding the relevant"
-				ewarn "dependencies using the same compiler version."
-			fi
-
-			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
-				ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
-				ewarn "These may not be optimal for your GPU."
-				ewarn ""
-				ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"
-				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
-				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
-				ewarn ""
-				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
-				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
-			fi
-		fi
-
-		# com_googlesource_code_re2 weird branch using absl, doesnt work with released re2
-		#com_github_googleapis_googleapis
-		local SYSLIBS=(
-			absl_py
-			astor_archive
-			astunparse_archive
-			boringssl
-			com_github_googlecloudplatform_google_cloud_cpp
-			com_github_grpc_grpc
-			com_google_protobuf
-			curl
-			cython
-			dill_archive
-			double_conversion
-			enum34_archive
-			flatbuffers
-			functools32_archive
-			gast_archive
-			gif
-			hwloc
-			icu
-			jsoncpp_git
-			libjpeg_turbo
-			lmdb
-			nasm
-			nsync
-			opt_einsum_archive
-			org_sqlite
-			pasta
-			pcre
-			png
-			pybind11
-			six_archive
-			snappy
-			tblib_archive
-			termcolor_archive
-			typing_extensions_archive
-			wrapt
-			zlib
-		)
-
-		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
-		export TF_IGNORE_MAX_BAZEL_VERSION=1
-
-		# This is not autoconf
-		./configure || die
-
-		echo 'build --config=noaws --config=nohdfs' >> .bazelrc || die
-		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
-		echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
-		echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
-
-		for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags)
-		do
-			echo "build --copt=\"${cflag}\"" >> .bazelrc || die
-			echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die
-		done
-	}
-	if use python; then
-		python_foreach_impl run_in_build_dir do_configure
-	else
-		do_configure
-	fi
-}
-
-src_compile() {
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
-
-	if use python; then
-		python_setup
-		BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}"
-	fi
-
-	# fail early if any deps are missing
-	ebazel build -k --nobuild \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so \
-		//tensorflow:libtensorflow_cc.so \
-		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
-
-	ebazel build \
-		//tensorflow:libtensorflow_framework.so \
-		//tensorflow:libtensorflow.so
-	ebazel build //tensorflow:libtensorflow_cc.so
-
-	do_compile() {
-		ebazel build //tensorflow/tools/pip_package:build_pip_package
-	}
-	BUILD_DIR="${S}"
-	cd "${BUILD_DIR}"
-	use python && python_foreach_impl run_in_build_dir do_compile
-	ebazel shutdown
-}
-
-src_install() {
-	local i l
-	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
-	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
-
-	do_install() {
-		einfo "Installing ${EPYTHON} files"
-		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
-		mkdir -p "${srcdir}" || die
-		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
-		cd "${srcdir}" || die
-		esetup.py install
-
-		# libtensorflow_framework.so is in /usr/lib already
-		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
-		rm -f "${D}/$(python_get_sitedir)"/${PN}_core/lib${PN}_framework.so* || die
-		python_optimize
-	}
-
-	if use python; then
-		python_foreach_impl run_in_build_dir do_install
-
-		# Symlink to python-exec scripts
-		for i in "${ED}"/usr/lib/python-exec/*/*; do
-			n="${i##*/}"
-			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
-		done
-
-		python_setup
-		local BUILD_DIR="${S}-${EPYTHON/./_}"
-		cd "${BUILD_DIR}" || die
-	fi
-
-	einfo "Installing headers"
-	ebazel build //tensorflow:install_headers
-	ebazel shutdown
-	insinto /usr/include/${PN}/
-	doins -r bazel-bin/tensorflow/include/*
-
-	einfo "Installing libs"
-	# Generate $(tc-getPKG_CONFIG) file
-	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
-	insinto /usr/$(get_libdir)/pkgconfig
-	doins ${PN}.pc ${PN}_cc.pc
-
-	for l in libtensorflow{,_framework,_cc}.so; do
-		dolib.so bazel-bin/tensorflow/${l}
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
-		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
-	done
-
-	einstalldocs
-}
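As a side note on the ewarn block in src_configure above: the compute-capability override it describes is an ordinary Portage variable. A hedged sketch of what that looks like in practice (the 7.5,3.5 values are the example quoted by the ebuild itself, not a recommendation; use the capability of your own GPU):

    # /etc/portage/make.conf (illustrative)
    TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5

    emerge --ask sci-libs/tensorflow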

diff --git a/sci-libs/tensorflow/tensorflow-2.5.0-r2.ebuild b/sci-libs/tensorflow/tensorflow-2.5.0-r3.ebuild
similarity index 99%
rename from sci-libs/tensorflow/tensorflow-2.5.0-r2.ebuild
rename to sci-libs/tensorflow/tensorflow-2.5.0-r3.ebuild
index 3bfe94d961f..ee65934a9f4 100644
--- a/sci-libs/tensorflow/tensorflow-2.5.0-r2.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.5.0-r3.ebuild
@@ -87,7 +87,10 @@ RDEPEND="
 	sys-libs/zlib
 	>=sys-apps/hwloc-2
 	cuda? (
-		=dev-util/nvidia-cuda-toolkit-11*[profiler]
+		|| (
+			=dev-util/nvidia-cuda-toolkit-10*[profiler]
+			=dev-util/nvidia-cuda-toolkit-11.4*[profiler]
+		)
 		=dev-libs/cudnn-8*
 	)
 	mpi? ( virtual/mpi )
@@ -143,9 +146,6 @@ REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
 
 S="${WORKDIR}/${MY_P}"
 
-PATCHES=(
-	"${FILESDIR}/0008-patch-ruy-for-gcc-11.patch"
-)
 DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
 CHECKREQS_MEMORY="5G"
 CHECKREQS_DISK_BUILD="10G"
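The dependency hunk above replaces the single =dev-util/nvidia-cuda-toolkit-11* atom with an any-of group, so either a CUDA 10 toolkit or a CUDA 11.4 toolkit now satisfies it. One illustrative way to see which installed slot would be used (requires app-portage/gentoolkit):

    equery list 'dev-util/nvidia-cuda-toolkit'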



* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2024-02-03 19:35 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2024-02-03 19:35 UTC (permalink / raw
  To: gentoo-commits

commit:     9d1694eac17c1be7f0115747818ec13bc3c88de2
Author:     wangjiezhe <wangjiezhe <AT> gmail <DOT> com>
AuthorDate: Fri Jan 19 13:03:12 2024 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Sat Feb  3 19:32:49 2024 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=9d1694ea

sci-libs/tensorflow: add 2.13.1

Signed-off-by: wangjiezhe <wangjiezhe <AT> gmail.com>
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 sci-libs/tensorflow/Manifest                       |  18 +
 ...dd-rules-docker-http_archive-bazel-toolch.patch |  38 ++
 ...emlib-Latest-absl-LTS-has-split-cord-libs.patch |  32 ++
 ...Must-link-against-libm-for-round-and-log2.patch |  29 ++
 ...ensorflow_cc-Add-systemlib-nsync-linkopts.patch |  35 ++
 ...systemlib-Updates-for-Abseil-20220623-LTS.patch |  71 ++++
 ...0006-systemlib-Update-targets-for-absl_py.patch |  27 ++
 ...emlibs-Add-well_known_types_py_pb2-target.patch |  28 ++
 ...-0008-Relax-setup.py-version-requirements.patch |  55 +++
 ...flow-2.13.0-0009-fix-sparse-transpose-op2.patch |  30 ++
 ...0-0010-systemlibs-update-targets-for-absl.patch | 352 ++++++++++++++++
 ...mlibs-update-targets-for-google_cloud_cpp.patch |  25 ++
 ...w-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch |  29 ++
 sci-libs/tensorflow/tensorflow-2.13.1.ebuild       | 453 +++++++++++++++++++++
 14 files changed, 1222 insertions(+)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 40b0ce6a48ca..6e541231fe2d 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -3,6 +3,7 @@ DIST FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip 91462 BLAKE2B dc40d6129c0
 DIST FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip 16646 BLAKE2B 8f6ca6feee16cb57d9b4504e7615703c2c7ea86fa8ea3f814668478cc13b8cb3cb1429200359be5521ca11f77e1c3d575b2222d123d3ac03586faab88a131ac2 SHA512 6b4830a56137e226d51ba8166fe1a31c51256f1666cc0a683abfb154fbe313d415ce15e362756865a1dd91510cf581c619604b29e8a5288c328c85da57d53308
 DIST OouraFFT-v1.0.tar.gz 110531 BLAKE2B de0e280407e21118e2bc6ca93a7caf3c4a49d0a13eda018b1497f98851af73dda4cef56460dba310336c7ed958e34feef4784ca2575a13287dec2c1ac9a5af6d SHA512 89c6e8fd57abf26351b3efb792008a1bbe62d404a4225dcae8aa666b3782a421be071bdc9760ebb0c95b5336ee5ea517d2fa43ab915045f7cf6fd76e73578079
 DIST XNNPACK-659147817805d17c7be2d60bd7bbca7e780f9c82.zip 20341886 BLAKE2B 9aa37b3622e62e554b9021fca88f9f8d274989d4799d942c27a6d13ebb59409b130423a23f60f4e42042b8894457d8d691e17e3f21555562c8e9d1f97bb1ae13 SHA512 13c87064ec57adb29320c4d061ce0ae6cdb42b5e7584fbb758e3703f7f0a96d47e618b7ef584bf10d5b11d3658c43c6822e3b13748b6e659a5820ef534652ebf
+DIST XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip 24168206 BLAKE2B ba6d612cb09823a655f065a76bd9b956a37664eade90aede2d30d9892f6bcfa2c6134f5332eb31247bea6b46e51f47250ae09a6810bde181c72f715550811f49 SHA512 29c844e758ec2f2913dc477866b016afc04679f16da5774069dabbc9373ed210d510c4b1205eb681de20669e49f34098f490340f5524139b079461589f41e7b0
 DIST XNNPACK-e8f74a9763aa36559980a0c2f37f587794995622.zip 18756888 BLAKE2B 0a1787166e8bbfda4aa6010075d92573112a21f3f9d3b1c13bc931fae6fa4cafb71685e4c57e86d7a662912bb6431c2d39a24378bf82361b50e5855d1b62f524 SHA512 a6802f0995742af0ca82de010cbd42da230b36cc884612d4ba2de20ba0ca56da6a11209bfb01ee1a5ddc31dc891a69438fa4836ec9d62d56e32c6aa144c6e7aa
 DIST apple_support.1.1.0.tar.gz 27105 BLAKE2B 6982ed0188760caeb6951dd28d211449d37a3192fa75e22f5ea86b599a5a92bf8efcfe5a549146533b725aa0fd41584c4d12db3fab41ffbcbca60f657e9590f5 SHA512 db291209ab9a54238b244e02abbca749a695ca3b9b9dc2e207227d3ea32f13144f3236fa921df4c6ba954850635db56584582d8916bdf4c90a2adc55dc90cd3a
 DIST bazel-skylib-1.3.0.tar.gz 36103 BLAKE2B a58142b9d2a5da9f137705105aa735c8489519989ca7e633968114309f23074a56cd03b5fed70e284da63751d666904c2573940ad9a0feb2be689d695f0f07ae SHA512 ab3a0b465ebbfe07c139b92f1e8b2c0bcede66d6366d184891e3c0ccd6619164bc299777e7d7236cb463834b98426f6fb6890409e0ce94b75446dbd85854944f
@@ -13,6 +14,8 @@ DIST bazelbuild-rules_android-v0.1.1.zip 14654 BLAKE2B 1cfd5af08c6eab9602883f62d
 DIST bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz 130803 BLAKE2B 406331205e8e35fc9f7f727f6f7f9697c658af800e4b90373440c433ec5cfedfe0a3986ce5b1499a2db5b7aa4dc62efac364e73639b98864d2f63076e5b209e3 SHA512 2dc8f50c405cadf10d797117b51868fa22798c4ff7cc1c4f7d4c39fa0d2e5ffbfe8269df42f90c92e0870bb8e8a9c204d3dea237a7ded03f6275657060ee9774
 DIST bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz 464906 BLAKE2B 033d76b8081f4f987e64875ad5c8e7b8f894ec6be58c52ee02c4d31d4480fee02f3f432ea9c4630ad3f5d1163f820aff37f6493da797ec51b148b361ab3c8b25 SHA512 2cd841f4530503ed31fa6425cb749ef29f8a1071b5d55594644303233e58455783cb02402bc23d7104ef036745733087d43075a1fcdab2ac96cd1a9872a6ea4a
 DIST bazelbuild-rules_docker-v0.10.0.tar.gz 549649 BLAKE2B e7a537b21138a5c5d9ce360e46238f57c917d2dbf5dd17887607402227cbe7c5d68aead9db0ecdb74c09eed9dac26eb86e004a9020c251152d15beb48be0e0d7 SHA512 7802107f8520c88646728800637cce54dbcefc54aa4361288a5b46e403e41998bc23e16fbe94f23a2ca52e5c33fc255af65846963c2fd0139ca1195435ce1b03
+DIST bazelbuild-rules_foreign_cc-0.7.1.tar.gz 215296 BLAKE2B fec096138fbe0662bbaa566d1f9ba36f60ac14499019dec060d1cbe21b09fe11cbf9fa72dd45884d9c14dd5cfc0a41087634ddeeadafe1ed37992e965b362513 SHA512 cca6c685f566b113148b94a02c5a976ec585e7e1794d18c607983801ddb3985d5031aa76501b095d79420ac5852d8c27812f005aaf2db43ba885892c51e7fd0d
+DIST bazelbuild-rules_java-5.5.1.tar.gz 21057 BLAKE2B c9caf7019b8377d204adfafa45bc6c274177709ac2e2e387895dc186d035bdf0f22663c498b3d393dd947ad078b149774d0688bf82ebd3db6163f57b5c2141c5 SHA512 71fc4b2990cac3e8d4205fc847df22e480d33a2411dd969d779d0c3ee66ed6bd4f3a06e396d0df500442c942551cc69cdc5288eac4841f2badd99f44b61447b9
 DIST bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip 9422 BLAKE2B bbc45c97551ee8126335b611ffca6574dac843d4db9b3221b10d26487f8eecfe38ba0cd67a4039a3ad921c25fea6294c43e4f4e7a57a060a30791acd6c4f9d39 SHA512 ba06242feb711e7fb6e821fd9ac029248b4f3e466fb1acf971d0db8196f3efe4d114ef137acbe4913073f8cbe8ccab4d47d8bafa21b867b2d6a6d57f5a647f0c
 DIST bazelbuild-rules_jvm_external-4.3.zip 3998031 BLAKE2B e13022ad12c5cd1901d7dd3f1cccfd8ad04e0cf8f12b7329b9a1ed49b7ae7aca6c08704c5229427fa6a5b1f16e9c50e1a61f9adbe75c8a7ab69a291d8b2c2914 SHA512 a23bb25fdf7a5f5916bf5a4576c6c06ae2de92a6b9d8d52720ed12e61ee766f665652acdf84f9814acfd90baefab0941d3244a011f341185a378b053a51f21f3
 DIST bazelbuild-rules_pkg-0.7.0.tar.gz 76580 BLAKE2B 77574785070b45609d12aa2c2dd53c8cef18cb2c3a326324b5add996cc31a40f725bb3b12affcfba96540cd1d64a60fb8afa789125fe0aca328c652755d12cab SHA512 2fba108997065d753e17f1e7de1db08461416d03f8832d38e25f5151bfd0ad896565db9eb56b3702b604296a19620f9b60e5c15440855d36e64865010e6f4cdc
@@ -20,27 +23,40 @@ DIST bazelbuild-rules_pkg-0.7.1.tar.gz 77334 BLAKE2B fef99181792dac840724d0cfe4f
 DIST bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz 14304 BLAKE2B cdd23ef47d247f6d1b9fbaa49edbda7e1cd55ad2e3616f43ff8d21fa42888c1f36172683e47beb3f3678a2b252d9b6c82fd692711e3133862eade8b64da06ea1 SHA512 024021816b4999b62db41709e0e9354ffdc88beb61a081b061d934241f06b1aa5be3b74324cbec94d1871e60eb65209b2e6b4bb8ca4a101218eaf6196ec6a974
 DIST bazelbuild-rules_python-0.0.1.tar.gz 2302092 BLAKE2B 1db52eebf2461d779f764f2afdd070d1d0dd65eb2b83ccd98c2831da1784614ca281b114064729a9f257c64eceb62975aac8362d231c84f32abdf19aee7a1852 SHA512 40fa069a4482e2f83e29dc8e109652d14d187b2ec8efdcd36e98d117de93d66a938ed74999b42a2293fcb6eccc0a111cbbcf65c5c155579214bb1b96644280a5
 DIST bazelbuild-rules_swift.1.0.0.tar.gz 199181 BLAKE2B 8261cf061ab630cff5bd0bf55c0b62252d3c7cc34a368eef80c0d1e70534dc43b5596077754306e87ba3e5bbc4b77710ba4934ff748079b8e03e72143e15deab SHA512 9e4acdd0a168462b5b480aad72cda9b3b16aaaf86fdf367c4de80dfcc093cb80c74f2f2219190730be10471d07c94c4f9bf8756a938fb9aaee9d1a4d983c4761
+DIST benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz 204856 BLAKE2B a1601a38e71e08490f42e85e87675b2451082c531e2712f7e78ac0267c7fab0b6f1ac96fde34933d82380c61097a4467b277790657695fa51c97ac9504435959 SHA512 e4323f2e7b05566e7b634cc1ec18ae64c7cee1bf4ecdb2a3da97dec8393b1ef26580975e7823d4ee9d51d151db6456bc685717836eb3209574ada22a07451063
 DIST cub-1.9.9.zip 619595 BLAKE2B 265b797a906b03da886de88863236c9ab90daa31498ddf848fcaf5e5ee1342614ad9a41618120ca09cc4c0da3e96eeec5e20ca9d7ba3f9860c507f06d15e59e1 SHA512 8c9c0a3f66f8d518ec07f857b5625e006d52f28bade1c1478a0f37420e2f7586dc3ff029d551748a1802bb5544b16fde5388e8d5a45d61eec595201b9db7a30d
 DIST cudnn-frontend-v0.7.1.zip 20112411 BLAKE2B 6f836f6b484e708d43833aef3ae52b930b80845d9e85c61f0fa4670c04a9274975eb65f682a989b9ce5ce1c3992cf43afa6f0c8e684b1754965966a79bcfda65 SHA512 f211c5e8f2a80e445d15fc9003a63485a85dcfb91c170f92e0464d494509ef44c0083a745372ff2e17dc25791f90402e4a672f795eb3faa9650e6776266b6a36
 DIST cudnn-frontend-v0.7.3.zip 20124177 BLAKE2B 9c32d99d69d4c7bc96fd6189aa1c2cdfac6fa31dfe84beebaee0e791e7a27768864067159da4473f737612973388daf39c7770ad9c1270bed840221bb603fc4d SHA512 68f5dba9873b317d8239187b57e7b4306e9078e52ef0992e6f23982aa374eff6c2ef2232b6cfff8012f50d9105d6f61c84f7f7c9ab4139d4db451599f896e0b4
+DIST cudnn-frontend-v0.9.zip 20077185 BLAKE2B fcd9425be4c2ecc39db0fd92be355a7767b3d00cea990ff4b63ade3dff957f97a6e5fdb1e6f287f6473b2212a66e160940328062a70485c38d5619cf3cc2eb54 SHA512 f38fc912303f4f61ae76d3159ac51b105aba0f728e895b850a164480a729ec144bd8f99764db3e2203210dc114662aba4b4ffe0435d027c0cf620cb42a50df64
 DIST dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz 76170 BLAKE2B c28873deab233d73996137c254acecc4adb0a750cee675cfd0777ccdfa91ea704e338e7166705d47e775c45b46b152834268d89c0443a08c57b4b830bd07ac71 SHA512 e6a4fe9356b8f75f96e7f9960df40e227f8e5242e609f8cc8bf28e8161bd4f58e8c6de374d9cf216edf7e0e09ca502bc158d41c3058bc6e6e7b2bbfb9c5483ff
 DIST eigen-3460f3558e7b469efb8a225894e21929c8c77629.tar.gz 2833459 BLAKE2B f624102a174d80860314f0e895f641fb86369a2df88ba2e2589da86e5ff0802b36c64be44211f6013997e0251d74bb28051e3e7edcc6cc43ab88448524db3152 SHA512 c015dae59e0df1f25b6caef5f3c724cfd91cd440df6c3eba1ee7c129b20caf9ec4010209cc5edb82ed6534503a697ba6ee49f64be0359de332ed023cdede05cf
 DIST eigen-3bb6a48d8c171cf20b5f8e48bfb4e424fbd4f79e.tar.gz 2810518 BLAKE2B 97c9221024f765e6899c676602ee2c850fae661dad613957cead4bce29fce8d9cbb1ac20b812b71c699feea75768be2da945fc39e9b9e9cd2e5b3c6bcf034c60 SHA512 de2c35d3ab859021dac9de35b83cb94e824814e436cd40e45ca2f5f7f9fefadac2b4222f05de9eb1b03d04a29c751f9da3a2b804e6c0fc97b4a5508f25b1e7d4
+DIST eigen-b0f877f8e01e90a5b0f3a79d46ea234899f8b499.tar.gz 2833536 BLAKE2B 04bb103b64fa8c81ed337c67f5484fb15097f03905a504b19ebeaad4b33ab75baf1e3a2e060c91f1974272f55998555cd16c3da9d8a54a725aef39da7d39dae0 SHA512 1b239db63199aa6f3f9c35198294aff5b89c817befe6f394d69d2f4d6c6e3d946fda32119592da0d7894ea8b4fff12a1c1b8c5eda2e52f7365dc5aedda11f90f
 DIST farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz 467251 BLAKE2B 74fd0cf3de41a6db0aac74148f795335df58b5a3bdd370b22ae0763aba5a4952dcd0133ed300350636a4678ff54a5c59eedee749dfcef7283e21c181b88f92cb SHA512 7bc14931e488464c1cedbc17551fb90a8cec494d0e0860db9df8efff09000fd8d91e01060dd5c5149b1104ac4ac8bf7eb57e5b156b05ef42636938edad1518f1
 DIST gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip 940590 BLAKE2B 72e8ef48e6c20d3f3510eb3a891748df386f3ac127e79cd5c594b7289826294e29be5987632800898d94d6ded9016e398672fbdabd07af8f55d458540e66ec7b SHA512 b5683bd6e1cb942d0e1e07c3bd28d83d74706c45a33b59896124f659f361bb4f93181d3ccd282d9dd61df49695b38470f138ffd2c88b6d11fe160a0266d3909a
+DIST googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz 6091152 BLAKE2B 7d846fa18bdaf2ea12c1a064d6bf5a221f02b4398821032faf679dc41778f1ad13f0631a88baee127dfcfea40d3aed75f6bf1253ba800b25a4ea7b067044596e SHA512 7d098723547835c6646b776c764e85a226c23cab1709b7d4a780f60a3193630bb9301c0153382175fb4ddab876d1168d98a325f14bdc8500c02f6723fe7c7ab3
 DIST highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz 160745 BLAKE2B f2ffe8f13b1a8346b5b29e719c0a57f34e1fa6320d0f2d0575e4e97e813ed1a8f55acfb44b57aba70ea7d372ade18aee5ef82a881ecf457f580ffc92d6528c7b SHA512 4d6a7e8e321df82c009a0849b8da097a551f6c35a49fef65b89e731075a6af624918c0e55be9fd3a5bf07c519ab09bdefed57e0e39d4df0e79e189c468939de7
 DIST kissfft-131.1.0.tar.gz 52383 BLAKE2B 74e6d2e7d132a311b31c28a468e13d9772a53f0ea0abed0e0f49d8db9c183fb0646f58fd38df3e797b8577285899daf6b80446b149ce2582bb828410656d96df SHA512 bd715868ce0e93a291a0592fb1f8b960e832fc64efe863755e52b67d5addff9bcb444a1bf2570d1914c52b41dad1023d0d86400f5ea30c9fb84cd6b4f7210708
 DIST llvm-project-10939d1d580b9d3c9c2f3539c6bdb39f408179c0.tar.gz 179559452 BLAKE2B ccdf998502aea7e87ba128560f5458b2a959d32a13f56dc795b8a0ed794c0348ca035ca601a34c0c288990f358dc5c3c01712b7458ebd15c48b1c41b4413fcd2 SHA512 36997be5da7caeaf949ae093b3ec1d81dda668087cc94de1fee9f5262a019f40fca61e652787e3f9498cd8b021b8ffc8002daef189ae0e15fda281ef6d56ecd7
 DIST llvm-project-d8415b02a519f222ecf71b069c96cc85ac635de3.tar.gz 169045379 BLAKE2B fe25f9c889c5159fbc1d251640f65b3097b4260ec7b27d5133843502ee397995c1075fb10f1a6519c6f693757ab8fe0fe2b82bb96678ef4ec4086c09ce3c90c3 SHA512 546edd97778b4298d7bb645620010e00569813fab07b925a909db4cdd8feb9adc4898e488f7bb31e70587d7b4134820a3f49a98d4f87bcf1dcad9adf5eed7e4c
+DIST llvm-project-dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz 182909064 BLAKE2B ba2a2db104849d1b09115cc2decdbb2e5dc84c58b61074500ff728e29c2f380a0818a4e8df22f4a1552c04e243dd114862492d7f8df06132348034c500200e14 SHA512 4f51271b765a666b023547382f3f983453afbfc69b793336e381e335d6103978292e781f86fffe16cba8b6d6ea309b64e6d899570060c275779aa0a2b90948c7
 DIST llvmorg-10.0.1-openmp-10.0.1.src.tar.xz 955492 BLAKE2B 4197ecfb2e3498a95a2ba5923f2b4bdafbab645ddf2d3f1875d39752d3ab7304fb35bce918d2dc6e40e5ea809545ae0907d6bc0b94a3d68b8d96da48f5a62adc SHA512 5b6f6487feaabd2a18ef2bbb1a5f86bb567e264f9fdf56805cfdd3f65f36f21e7c1392ba5170fafb52a395fc1606b277233beba3df31dc2ab825ef8924e7a05a
 DIST nvidia-nccl-v2.13.4-1.tar.gz 287269 BLAKE2B 8719e26978225a9697101fb7998388c3179bb0af5c396602689242e5529492ad7a81a142e3a8492c9fa4c92adc251c58e67f97fee64a4fd1a046134ac7d737d7 SHA512 5f7077f69a0c1624a1c8ca3d2f503d8269751e26cb6ee63e1a165fb86877b62073ec4e182d939b9aacce4ee8bb8295a39d1b6d65ef3dc0fce795735341a13fc6
 DIST nvidia-nccl-v2.16.2-1.tar.gz 326883 BLAKE2B 86db7adc67ba311b72e7e013dbc2a04918c0746c1fb62079ccd3300691479e1f6e35e379d6ee4320e343666b68372c56607ae521f5ff2d7e59d5f4dc3b894097 SHA512 e6572c2e7adc03053048c0b1e5290ffaf6f294239d78038887582c847aa549e5e95c7970b943f1d0b8964c32b4cdee3785bf40886f274907b613f320e9de10d0
+DIST nvidia-nccl-v2.16.5-1.tar.gz 327261 BLAKE2B abeeb6a2d4b58647ecb17694d92f79e650d2f2ffbccf26682ab202e17a1b7d3c356fce26d9f6edffee0756d71887bba8a9d5c254ad433d3b4ae8babfe3294534 SHA512 fc13e83e2339921b732c02250e95614b21202c52461aa262489714af6d92117aa5c0647bb0dcc3394cd357d4b7e8a76fe4c3a3567ba4512c359f19e2ff41de4d
 DIST oneDNN-v2.7.1.tar.gz 6405831 BLAKE2B b43253f7bc1be0bca51746f06d825191ae544376b259662cbf8f567d8f39a6befde3c88a14744e053b851d2f89fb2600b999abef1acb585bc116d6fa0c95fe3f SHA512 062e97ac613d265627ec1f010aa1d101bf71c449c813187c26244c66c9e6b9b582a0a0a823a9391fa828f396051318fada8263ff64c4f4b4bb6ca1d7a08ea6e1
 DIST oneDNN-v2.7.3.tar.gz 6410473 BLAKE2B c6730100e0438d456eb4986f416ae2bd1f173a80c52d5090523af06790afae8ee17cc58ffa8ed7215cd0eff99191a925d8cdce9986c72ccb8ebffacedc434b18 SHA512 ad9450f8b701288fa1721f64d0cb33fc344e9fc4f84e517b3d52377189ffcd0c5b56156ef4a33ca3ffe2da886abcc7ac5b2a3407cc155bd4be2223751b84f7c9
+DIST oneDNN-v3.1.tar.gz 7556565 BLAKE2B db6865410e902778c0153c50cc1f16c12e358b360d7e865207a86489d42727e66945d422b8bfa52b04b5f2b34daf585f1472a031cd8810a36c6724a2779120c1 SHA512 2053157a3885618364a9da5ec738c9cc2dde15db1ce9737578565d25c4a15a65944db3bbd17780de2032cfa2329bea4cb0af24ee428c9c246bdfa07a5bdde30b
+DIST openxla-stablehlo-43d81c6883ade82052920bd367c61f9e52f09954.zip 27954369 BLAKE2B 30dddfcf2102e344d82171d8fcb2df68a3c2dedfc349a3f248c060e591535127d7716e1bf10c5eef20369eb0d81a6cc0eb5350a6979adb8a164b7bda62d6c745 SHA512 2432e4256bfd2d92ba717895967d87e05bb0201a5086314b1de5fe9078bfea37c14245b88b720ec82f2906751ab344da0dab9f714a6fffe79a0423cf7659e5ac
 DIST openxla-stablehlo-51f005f0a8ff6e28f535adfec4de936cb4097aa4.zip 6902721 BLAKE2B ef9766377a38f816f5a6dc60f34d5300b2775bc282084e9f34c7a5ccc6104a0154d44f2c57aba081889de50fc141a6059255fca3f681322343e316289d6540d7 SHA512 ffe46e21be6f617b6ecbc7ef35e83d441256e429150af60451cf04c02085fb1a0b60a9687d8d60d6f1f9321e6f6a92f24749a3c1cf1ee694a8ffc0fcd13f64f4
 DIST openxla-stablehlo-fdd47908468488cbbb386bb7fc723dc19321cb83.zip 301572 BLAKE2B c8242b3d9612fbdfa1c34ae5cb610aadd68464498e6cc84d48bcc38abb9e8636fa31b9a03b5a604a29cafe12a47e5b7be90c48d52fb6587bcd376d6307775197 SHA512 61b89d0dafe450ae95d770878385b5ed3cbb0109b79cf5e01304119f2f52255ccc83cedc542cfa5c342692945833b86606468224e67c6ff9dd6f7216b86adc7a
+DIST openxla-triton-1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz 985789 BLAKE2B ef3aa4d8033757d37a9ecde3a687a261c5ecde1200b30db1ae24cc5725c334f8662e741041be1e854ddb2cd5d9cb2b9d5f254d9219e8035c5f08df92b3ee9fab SHA512 67ae9075219e142ec3f3e5c3f05cff5fb15f3ef9605bd1c0dbb1e26940bf1e428a9c17b0d5f85c069844f6ea94b8a3ce94277bd4cd344238fbbdc3f56f91e08f
 DIST openxla-triton-2c3853269281da6742cf469a5ca5772947d271ce.tar.gz 459751 BLAKE2B 8b1b314fd1b6d8822a84cb9cacfd70e2c59784a76f879d75c910f376d38fbdccbc132ebab7f29c5bddde04afd7821c0322311d97c55fcfcc87580b82039a7efa SHA512 cedee3b982b93ae237a1e035ef2a9447aabc94ea4add63959b927670006b5cf546f064d5741ee9e731de31c343ed5869abe9c479d07360b07ef2b26f93081a6a
+DIST protobuf-3.21.9.zip 7156366 BLAKE2B 464ec84fd380d2d472cde5b7dd978c31ac8dc455934b7b1f7afe7dd836579ff74c1196d56dea2944fb41e5ef2f2e05683b889b54e4a3a11bb7cf87a0cd57f691 SHA512 311e0bcd3da54b8fb2df4de03a9b465cd56451018438e4d72054d78e70c37c70ee3c8a0378a177acb737ec863bdf9250d10bcbcdc0f97707c7edf97b9b37683b
 DIST pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip 61524 BLAKE2B 924419730bc6b94ec98a13db94f177b3b70d6c763158f43fb6f9e45605e73cfce238e6c996e2bf629dbb2a5af93ae99849ddc91174fc4664d702667f7423892d SHA512 d25262b47e39058d5aa8b8197c4984a5b941587a19079a2210f738c3af34ab8e8477731c88ca80c3f812a6a04635f80300124d93cc1e099352ef2aca04bdc3ae
+DIST pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz 28202 BLAKE2B 58a13d005367d938e9fc17da6435a8c2706722d0770c173cbfc433b7ea4de7e7d1b97653c5859cc8c436cccda3b8d21df906249a3a60ee4bba7cc6601abfaa59 SHA512 91befca03fa1b4f12446b84d5fe3514df6c9e352a19042f4f8856f6df306229f23b6ca2976a17ab22c8dd5afa223a44013e54a4348298c305a7688646129f0a4
+DIST pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz 8722 BLAKE2B 3f323e622fdd9921019cfdf4d281d54530b152efa557d2e57ed08bc8785fa419815c2a7e36c38de8d0c984f76d052607d4fbd409c4ede76cb9878823350dda2c SHA512 80fff0799119b535a95badb307408c7a81bad0261c0e753095732036d0801c4894318df736c6b1b3ca09201d3f5e4e971593a7d5e9df71c1cd588e4be3799c1f
+DIST pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip 76830 BLAKE2B 16b35eba4d9bcd345809e9acfc4c692a30d91ffa5319064e8d00a9d1382d6215ef2a73447ea54c8302610fa59d0b1a17add73f9fb0c86b2fae946614f1b70592 SHA512 f73fb217ff471143f473707576838ea85c5319edd09847d452cdaf66445f2abe303545208c44bf32856777d27323d7da9d76d2b148a16dc025c351b16fa2b279
 DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz 3515639 BLAKE2B d4adc49ea1bcbfd7a7efb13cdfea6a1d9cf717b06209c03342f92a3d624de65bcdf2ce7aa7fa8bd5f95ad423447ee833fdea718e16f98037344df75de8bde943 SHA512 f4c0ce922cee0df62c15a335889bb59b5f70ad71448b42b9c1bfa9b5e77c4c4d5f1613f607f32fa9d6817d0d5f49c554e1378079a1cd66a9cd2492796d48c3c2
 DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip 3812878 BLAKE2B 30048677534192f1e288c69be5a0373844206cc4e209d48b92f5bf38da37003bdd5125b6588ec0f34acd855acd9cd16193725976ede644d3140fbbcf03d2d364 SHA512 963fa6c6948102d15cae1db90645e9cf85d1efc0fd541f75dfff7d6efe62fdd196085910cdb366be56b7e71f36df98edd211fc875aff6eb1e9962e0d62f43667
 DIST pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz 3512335 BLAKE2B 71c14866fde3846b5f375b39fe2f081a6c219b36fc0721640971f99c53ca77c8e7f8df83992d777af32a28c645d64f27dca838bd8e60835c5497e131467b22d0 SHA512 6a61f4574661a55771c2ec31bb0919a51d0bd8c770477b254a5c14dc5323716af275c7fe3abc5aa96720d7cc929559ca66f614265d3940e076b8db2fa15c8e36
@@ -52,7 +68,9 @@ DIST sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz 2569603 BLAKE2B
 DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
 DIST tensorflow-2.11.0.tar.gz 67996769 BLAKE2B 539a8d7084280023c7b019a079aad6bdf0fa94b22276250a02913fb0ad496b4af2115276152b4f37101547404b616de58f209b9d1036e5d4dd9b0f7072a59ba9 SHA512 cda16db72a0ede72ac9f5e76c3a745ea9d72421fa40021303032f8fc3ac2755f64524f97a4629c18cf888f259027439b49ec921e0f5fd329a6ba060235a658d5
 DIST tensorflow-2.12.0.tar.gz 69710128 BLAKE2B 582b7b2717edd0ce41ecd74525fd38684d375cf1833c206cb53fa10ef964bb41ef8f29fa947a1f77e892bb68793d53c567bc9c4c9124dba94913f57ddcd3b1f9 SHA512 9273720b5be08e5d3dc76aafa4af6b27a2d50afd02b181e7632f3d70961995b2e0e5acb13e70c9b704ef475617c23d70047fbe74d5b63b156cf8f2fa8a856b84
+DIST tensorflow-2.13.1.tar.gz 70663056 BLAKE2B fcaff251ae3757f7e20deb07566485ca243f943ce118886edcfea3c41aff8baf83b98e5e3eea97c5243cb5db2e7418ec58687b3099dffd5c750395b554689343 SHA512 4f7bae717444a34704cb53466051a5708be13342a193be14914fbddd4790e327f3b6c37063a36d1c7835c50cf99500895aaffc646fdb9b421e6b873dfe4b2e46
 DIST tensorflow-patches-2.11.0.tar.bz2 2977 BLAKE2B 53672704ccfc5291f7070421af9f7246d2f211689b18f35917d4d166ff5e9ddb623db4dd9dc8054e0f2262b162dd8c2216446c6ca5e2bf538872debf8eb8aec1 SHA512 866c6abb78934c1a645ab3172f93d81423e2023fa1e8688255ef0777e340d810a6889c838b841be765f0897f7a269c4d6cb52b8f59af114bf5b9e181b1348590
 DIST tensorflow-patches-2.12.0.tar.bz2 4194 BLAKE2B b61efaf0ade6ef88b5abb858a84b537e02ff9fcd032a2a7f68a6467e53511a50fff66ef7e1096f343a8909e165b1b76146cb6a8db8e1974eeecf2cbf0b6a71a0 SHA512 2f931fd4b995d33300d392f7dafd6dd23671772f733c28faed239d01e9b032967afb17cab50908fa38956e2cde479a13dfdc632e622d918fe55d281aa9b3dc4e
 DIST tensorflow-runtime-4ce3e4da2e21ae4dfcee9366415e55f408c884ec.tar.gz 15313054 BLAKE2B 316da579b93d83bca43d51198dc65dea12972d73f019a5b78fe53162966e022d21d4225ba4a7786d1a0f376550a1052c59858df04b958768962b88d64d3c5083 SHA512 ea490ebc8a5eef4a7ce6185c19e3b1305fd886c8145ef54387076f458bfec56a8a33452728206afa67001273920f6958317c8c4289e32ac6fea432e15a2502c5
+DIST tensorflow-runtime-7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz 15228644 BLAKE2B e621ece4bbe3139661ef48c628459118eb2078151907630d6fde4086bd73f09af2ab0bb1c43ccf81d84230e3bb3be617e505f76c5d4333fee9adece58e4f4042 SHA512 f79f1e0a44a60cd064e21461380dfd5eb47a8912064f238da4ea94c8c8c94a680e438ff2b202bd0c81049e104293b5bbbcdfb604cf9ebecf6e6bf34d6782b0f5
 DIST tensorflow-runtime-91d765cad5599f9710973d3e34d4dc22583e2e79.tar.gz 15226589 BLAKE2B 5a00d0f884c86f85a25aba8f7d9eee509f35c114e9bfa24ce3effe9437bc549a4a7f013b03b515fbb4a217724a2e2abca96300fba64560b0d0e4fdb05fb9c3ac SHA512 b2fc8a240de1a13fade8628358f410b8f6e1bfde9d2cec3765b62d2ee7eb143c168687a23cb79c7aecd19a668d2d3be46fba361d16ad29b722fe19004aa3c5a2

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
new file mode 100644
index 000000000000..b6a4dab0313a
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
@@ -0,0 +1,38 @@
+From 87b5fceaeb9667c47504637462f0b5944661e5fd Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Wed, 11 Sep 2019 12:08:34 +0800
+Subject: [PATCH 01/12] WORKSPACE: add rules-docker http_archive,
+ bazel-toolchains uses git_repo
+
+git_repository() rules cannot pull from --distdir and fail when building
+without internet access. Use http_archive instead and pin the sha256
+hash as well.
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ WORKSPACE | 10 ++++++++++
+ 1 file changed, 10 insertions(+)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index 1286ef9ac03..1a19e5f2e3d 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,15 @@
+ workspace(name = "org_tensorflow")
+ 
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
++http_archive(
++    name = "io_bazel_rules_docker",
++    sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
++    strip_prefix = "rules_docker-0.10.0",
++    urls = [
++        "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
++    ],
++)
++
+ # Initialize the TensorFlow repository and all dependencies.
+ #
+ # The cascade of load() statements and tf_workspace?() calls works around the
+-- 
+2.41.0
+
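The motivation stated in the patch above is offline resolvability: http_archive rules, unlike git_repository ones, can be satisfied from a local directory of pre-fetched, sha256-pinned archives. As a rough illustration of that mechanism, Bazel accepts a --distdir option pointing at such a directory (the path here is a placeholder; the real wiring is done by the bazel eclass):

    bazel build --distdir=/var/cache/distfiles //tensorflow:libtensorflow_cc.so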

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
new file mode 100644
index 000000000000..d14e9a7215a9
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
@@ -0,0 +1,32 @@
+From e3c7880a940711d071747c2494dd8966fad9c63f Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sun, 6 Feb 2022 00:13:56 -0800
+Subject: [PATCH 02/12] systemlib: Latest absl LTS has split cord libs
+
+---
+ third_party/absl/system.absl.strings.BUILD | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD
+index fa9a7a84f67..63bac99d71b 100644
+--- a/third_party/absl/system.absl.strings.BUILD
++++ b/third_party/absl/system.absl.strings.BUILD
+@@ -26,7 +26,14 @@ cc_library(
+ 
+ cc_library(
+     name = "cord",
+-    linkopts = ["-labsl_cord"],
++    linkopts = [
++        "-labsl_cord",
++        "-labsl_cord_internal",
++        "-labsl_cordz_functions",
++        "-labsl_cordz_handle",
++        "-labsl_cordz_info",
++        "-labsl_cordz_sample_token",
++    ],
+     deps = [
+         ":str_format",
+         "//absl/container:compressed_tuple",
+-- 
+2.41.0
+
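The extra -labsl_cord* link options above assume an Abseil LTS in which the cord implementation is split across several shared objects. An illustrative check that the installed abseil-cpp actually provides them (adjust the libdir for your system):

    ls /usr/lib64/libabsl_cord*.so*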

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
new file mode 100644
index 000000000000..f07c70b8f88e
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
@@ -0,0 +1,29 @@
+From d508dad1ffe6c6229f912fb7283dc90666a09025 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 5 Sep 2022 12:52:44 -0700
+Subject: [PATCH 03/12] mkl_dnn: Must link against libm for round and log2
+
+---
+ third_party/mkl_dnn/mkldnn_v1.BUILD | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/third_party/mkl_dnn/mkldnn_v1.BUILD b/third_party/mkl_dnn/mkldnn_v1.BUILD
+index 6a26ca83b44..c938a8c7a9f 100644
+--- a/third_party/mkl_dnn/mkldnn_v1.BUILD
++++ b/third_party/mkl_dnn/mkldnn_v1.BUILD
+@@ -163,9 +163,9 @@ cc_library(
+     includes = _INCLUDES_LIST,
+     # TODO(penpornk): Use lrt_if_needed from tensorflow.bzl instead.
+     linkopts = select({
+-        "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt"],
+-        "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt"],
+-        "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt"],
++        "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt", "-lm"],
++        "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt", "-lm"],
++        "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt", "-lm"],
+         "//conditions:default": [],
+     }),
+     textual_hdrs = _TEXTUAL_HDRS_LIST,
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
new file mode 100644
index 000000000000..5fe8e7117c66
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
@@ -0,0 +1,35 @@
+From b87687bace18200785b3f2686791c457f3ef72e7 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:42:48 +0800
+Subject: [PATCH 04/12] tensorflow_cc: Add systemlib nsync linkopts
+
+Linkopts don't get propagated up to the shared library correctly, so
+work around this by applying them directly
+---
+ tensorflow/BUILD | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index fce465ff1f2..4c0bd5a148b 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -43,6 +43,7 @@ load(
+     "tf_cc_shared_library",
+ )
+ load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
++load("@local_config_syslibs//:build_defs.bzl", "if_system_lib")
+ 
+ # copybara:uncomment_begin
+ # load("//devtools/copybara/rules:copybara.bzl", "copybara_config_test")
+@@ -1293,7 +1294,7 @@ tf_cc_shared_library(
+             "-z defs",
+             "-Wl,--version-script,$(location //tensorflow:tf_version_script.lds)",
+         ],
+-    }),
++    }) + if_system_lib("nsync", ["-lnsync_cpp"]),
+     per_os_targets = True,
+     roots = [
+         "//tensorflow/c:c_api",
+-- 
+2.41.0
+
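After a build with the nsync system library enabled, the -lnsync_cpp added above should show up as a NEEDED entry of the resulting shared object. An illustrative check (the bazel-bin path and soname depend on the build, so adjust as needed):

    readelf -d bazel-bin/tensorflow/libtensorflow_cc.so | grep NEEDED | grep nsync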

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
new file mode 100644
index 000000000000..4395e0dc1180
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
@@ -0,0 +1,71 @@
+From 11ca00ba2561f6466917a7bbb23fb266e5e31045 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:10:03 -0700
+Subject: [PATCH 05/12] systemlib: Updates for Abseil 20220623 LTS
+
+These targets are header-only and just need stub bazel targets
+---
+ third_party/absl/system.absl.functional.BUILD | 22 +++++++++++++++++++
+ third_party/absl/system.absl.random.BUILD     | 12 ++++++++++
+ 2 files changed, 34 insertions(+)
+
+diff --git a/third_party/absl/system.absl.functional.BUILD b/third_party/absl/system.absl.functional.BUILD
+index a4f70acf35c..579181dec07 100644
+--- a/third_party/absl/system.absl.functional.BUILD
++++ b/third_party/absl/system.absl.functional.BUILD
+@@ -2,10 +2,32 @@ load("@rules_cc//cc:defs.bzl", "cc_library")
+ 
+ package(default_visibility = ["//visibility:public"])
+ 
++cc_library(
++    name = "any_invocable",
++    deps = [
++        "//absl/base:base_internal",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/meta:type_traits",
++        "//absl/utility",
++    ],
++)
++
+ cc_library(
+     name = "bind_front",
++    deps = [
++        "//absl/base:base_internal",
++        "//absl/container:compressed_tuple",
++        "//absl/meta:type_traits",
++        "//absl/utility",
++    ],
+ )
+ 
+ cc_library(
+     name = "function_ref",
++    deps = [
++        "//absl/base:base_internal",
++        "//absl/base:core_headers",
++        "//absl/meta:type_traits",
++    ],
+ )
+diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD
+index 948de07751a..5ebd656be8e 100644
+--- a/third_party/absl/system.absl.random.BUILD
++++ b/third_party/absl/system.absl.random.BUILD
+@@ -51,3 +51,15 @@ cc_library(
+         "//absl/types:span",
+     ],
+ )
++
++cc_library(
++    name = "bit_gen_ref",
++    deps = [
++        ":random",
++        "//absl/base:core_headers",
++        "//absl/base:fast_type_id",
++        "//absl/meta:type_traits",
++        "//absl/random/internal:distribution_caller",
++        "//absl/random/internal:fast_uniform_bits",
++    ],
++)
+-- 
+2.41.0
+
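The stub cc_library targets above are header-only on the Abseil side, so the only system requirement is that the corresponding headers exist. An illustrative existence check against the usual include prefix:

    ls /usr/include/absl/functional/any_invocable.h \
       /usr/include/absl/functional/bind_front.h \
       /usr/include/absl/random/bit_gen_ref.h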

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch
new file mode 100644
index 000000000000..683613ced2f2
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch
@@ -0,0 +1,27 @@
+From cb0eb12a89b18955c75e305f96069aeabf01a5b2 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:33:14 -0700
+Subject: [PATCH 06/12] systemlib: Update targets for absl_py
+
+---
+ third_party/systemlibs/absl_py.absl.flags.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/absl_py.absl.flags.BUILD b/third_party/systemlibs/absl_py.absl.flags.BUILD
+index 4049989ae2f..614938fb8c4 100644
+--- a/third_party/systemlibs/absl_py.absl.flags.BUILD
++++ b/third_party/systemlibs/absl_py.absl.flags.BUILD
+@@ -5,3 +5,10 @@ package(default_visibility = ["//visibility:public"])
+ py_library(
+     name = "flags",
+ )
++
++py_library(
++    name = "argparse_flags",
++    deps = [
++        ":flags",
++    ],
++)
+-- 
+2.41.0
+
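The new argparse_flags stub presumes the system absl-py already ships that module; an illustrative one-line check:

    python -c 'import absl.flags.argparse_flags as m; print(m.__file__)'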

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch
new file mode 100644
index 000000000000..c7a82e4d8c16
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch
@@ -0,0 +1,28 @@
+From 7c532a426ca473aff233df19baef46fe9a2196a1 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sat, 3 Jun 2023 16:23:51 -0700
+Subject: [PATCH 07/12] systemlibs: Add well_known_types_py_pb2 target
+
+Bug: https://github.com/tensorflow/tensorflow/issues/60667
+---
+ third_party/systemlibs/protobuf.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD
+index 4d05ab28d12..b3d72b0e3ad 100644
+--- a/third_party/systemlibs/protobuf.BUILD
++++ b/third_party/systemlibs/protobuf.BUILD
+@@ -111,3 +111,10 @@ py_library(
+     visibility = ["//visibility:public"],
+     deps = [dep + "_proto" for dep in proto[1][1]],
+ ) for proto in WELL_KNOWN_PROTO_MAP.items()]
++
++py_proto_library(
++    name = "well_known_types_py_pb2",
++    include = ".",
++    srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()],
++    visibility = ["//visibility:public"],
++)
+-- 
+2.41.0
+
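The well_known_types_py_pb2 target maps onto the generated *_pb2 modules for protobuf's well-known types, which the system protobuf Python bindings provide; an illustrative import check:

    python -c 'from google.protobuf import any_pb2, timestamp_pb2; print(timestamp_pb2.Timestamp)'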

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch
new file mode 100644
index 000000000000..c2467776b33a
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch
@@ -0,0 +1,55 @@
+From fa870249f5629d1e5cc3299c1a42078dd94343c7 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 10:14:42 +0800
+Subject: [PATCH 08/12] Relax setup.py version requirements
+
+---
+ tensorflow/tools/pip_package/setup.py | 16 ++++++++--------
+ 1 file changed, 8 insertions(+), 8 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
+index e64c0d4344b..62315962318 100644
+--- a/tensorflow/tools/pip_package/setup.py
++++ b/tensorflow/tools/pip_package/setup.py
+@@ -86,21 +86,21 @@ REQUIRED_PACKAGES = [
+     'astunparse >= 1.6.0',
+     'flatbuffers >= 23.1.21',
+     # TODO(b/213222745) gast versions above 0.4.0 break TF's tests
+-    'gast >= 0.2.1, <= 0.4.0',
++    'gast',
+     'google_pasta >= 0.1.1',
+     'h5py >= 2.9.0',
+-    'libclang >= 13.0.0',
+-    'numpy >= 1.22, <= 1.24.3',
++    # 'libclang >= 13.0.0',
++    'numpy >= 1.22',
+     'opt_einsum >= 2.3.2',
+     'packaging',
+     'protobuf>=3.20.3,<5.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5',
+     'setuptools',
+     'six >= 1.12.0',
+     'termcolor >= 1.1.0',
+-    'typing_extensions>=3.6.6,<4.6.0',
++    'typing_extensions>=3.6.6',
+     'wrapt >= 1.11.0',
+-    'tensorflow-io-gcs-filesystem >= 0.23.1;platform_machine!="arm64" or ' +
+-    'platform_system!="Darwin"',
++    # 'tensorflow-io-gcs-filesystem >= 0.23.1;platform_machine!="arm64" or ' +
++    # 'platform_system!="Darwin"',
+     # grpcio does not build correctly on big-endian machines due to lack of
+     # BoringSSL support.
+     # See https://github.com/tensorflow/tensorflow/issues/17882.
+@@ -134,8 +134,8 @@ FAKE_REQUIRED_PACKAGES = [
+     _VERSION + ';platform_system=="Windows"',
+ ]
+ 
+-if platform.system() == 'Linux' and platform.machine() == 'x86_64':
+-  REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
++# if platform.system() == 'Linux' and platform.machine() == 'x86_64':
++#   REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
+ 
+ if collaborator_build:
+   # If this is a collaborator build, then build an "installer" wheel and
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch
new file mode 100644
index 000000000000..26b61ac3e5fd
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch
@@ -0,0 +1,30 @@
+From 7961afc6f67a4278409f7bdb710180daeb91c106 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 10:31:31 +0800
+Subject: [PATCH 09/12] fix sparse transpose op2
+
+---
+ tensorflow/core/kernels/sparse/transpose_op.cc | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/tensorflow/core/kernels/sparse/transpose_op.cc b/tensorflow/core/kernels/sparse/transpose_op.cc
+index 4fe99013480..a247d417504 100644
+--- a/tensorflow/core/kernels/sparse/transpose_op.cc
++++ b/tensorflow/core/kernels/sparse/transpose_op.cc
+@@ -208,6 +208,13 @@ Status CSRSparseMatrixTranspose<Device, T>::operator()(
+   return OkStatus();
+ }
+ 
++#if GOOGLE_CUDA || TENSORFLOW_USE_ROCM
++template struct CSRSparseMatrixTranspose<GPUDevice, float>;
++template struct CSRSparseMatrixTranspose<GPUDevice, double>;
++template struct CSRSparseMatrixTranspose<GPUDevice, std::complex<float>>;
++template struct CSRSparseMatrixTranspose<GPUDevice, std::complex<double>>;
++#endif
++
+ // CPU kernel for transposing a single component of a CSR SparseMatrix.
+ template <typename T>
+ struct CSRSparseMatrixTransposeComponent<CPUDevice, T> {
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch
new file mode 100644
index 000000000000..8ba85f4f1019
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch
@@ -0,0 +1,352 @@
+From d224ce2be1a6b67ab53697ae978c1d29e6d3e159 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 13:12:20 +0800
+Subject: [PATCH 10/12] systemlibs: update targets for absl
+
+---
+ .../distribute/experimental/rpc/kernels/BUILD |   1 +
+ third_party/absl/system.absl.debugging.BUILD  |  20 +-
+ third_party/absl/system.absl.log.BUILD        | 271 ++++++++++++++++++
+ third_party/absl/workspace.bzl                |   1 +
+ 4 files changed, 288 insertions(+), 5 deletions(-)
+ create mode 100644 third_party/absl/system.absl.log.BUILD
+
+diff --git a/tensorflow/distribute/experimental/rpc/kernels/BUILD b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+index f8757df41e0..d168d36cb2a 100644
+--- a/tensorflow/distribute/experimental/rpc/kernels/BUILD
++++ b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+@@ -51,6 +51,7 @@ tf_kernel_library(
+         "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_cc_grpc_proto",
+         "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_proto_cc",
+         "@com_github_grpc_grpc//:grpc++",
++        "@com_google_absl//absl/log:check",
+         "@com_google_absl//absl/strings",
+         "@com_google_absl//absl/strings:str_format",
+     ],
+diff --git a/third_party/absl/system.absl.debugging.BUILD b/third_party/absl/system.absl.debugging.BUILD
+index 931ffdc9e92..223db7b4c46 100644
+--- a/third_party/absl/system.absl.debugging.BUILD
++++ b/third_party/absl/system.absl.debugging.BUILD
+@@ -26,15 +26,25 @@ cc_library(
+ 
+ cc_library(
+     name = "failure_signal_handler",
+-    linkopts = [
+-        "-labsl_failure_signal_handler",
+-        "-labsl_examine_stack",
++    linkopts = ["-labsl_failure_signal_handler"],
++    deps = [
++        ":examine_stack",
++        ":stacktrace",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
+     ],
++)
++
++cc_library(
++    name = "examine_stack",
++    linkopts = ["-labsl_examine_stack"],
+     deps = [
+         ":stacktrace",
+         ":symbolize",
+-        "//absl/base",
+-        "//absl/base:errno_saver",
++        "//absl/base:config",
++        "//absl/base:core_headers",
+         "//absl/base:raw_logging_internal",
+     ],
+ )
+diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD
+new file mode 100644
+index 00000000000..9a2a5de657e
+--- /dev/null
++++ b/third_party/absl/system.absl.log.BUILD
+@@ -0,0 +1,271 @@
++load("@rules_cc//cc:defs.bzl", "cc_library")
++
++package(default_visibility = ["//visibility:public"])
++
++cc_library(
++    name = "log",
++    deps = [
++        "//absl/log:internal_log_impl",
++    ],
++)
++
++cc_library(
++    name = "internal_log_impl",
++    deps = [
++        ":internal_conditions",
++        ":internal_message",
++        ":internal_strip",
++    ],
++)
++
++cc_library(
++    name = "internal_conditions",
++    linkopts = ["-labsl_log_internal_conditions"],
++    deps = [
++        ":internal_voidify",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_voidify",
++    deps = ["//absl/base:config"],
++)
++
++cc_library(
++    name = "internal_message",
++    linkopts = ["-labsl_log_internal_message"],
++    deps = [
++        ":entry",
++        ":globals",
++        ":internal_append_truncated",
++        ":internal_format",
++        ":internal_globals",
++        ":internal_log_sink_set",
++        ":internal_nullguard",
++        ":internal_proto",
++        ":severity",
++        ":sink",
++        ":sink_registry",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:errno_saver",
++        "//absl/base:raw_logging_internal",
++        "//absl/base:strerror",
++        "//absl/container:inlined_vector",
++        "//absl/debugging:examine_stack",
++        "//absl/memory",
++        "//absl/strings",
++        "//absl/time",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_append_truncated",
++    deps = [
++        "//absl/base:config",
++        "//absl/strings",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_format",
++    linkopts = ["-labsl_log_internal_format"],
++    deps = [
++        ":internal_append_truncated",
++        ":internal_config",
++        ":internal_globals",
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++        "//absl/strings:str_format",
++        "//absl/time",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_globals",
++    linkopts = ["-labsl_log_internal_globals"],
++    deps = [
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
++        "//absl/strings",
++        "//absl/time",
++    ],
++)
++
++cc_library(
++    name = "internal_proto",
++    linkopts = ["-labsl_log_internal_proto"],
++    deps = [
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_log_sink_set",
++    linkopts = ["-labsl_log_internal_log_sink_set"],
++    deps = [
++        ":entry",
++        ":globals",
++        ":internal_config",
++        ":internal_globals",
++        ":severity",
++        ":sink",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
++        "//absl/cleanup",
++        "//absl/strings",
++        "//absl/synchronization",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_config",
++    deps = [
++        "//absl/base:config",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_nullguard",
++    linkopts = ["-labsl_log_internal_nullguard"],
++    deps = [
++        "//absl/base:config",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "globals",
++    linkopts = ["-labsl_log_globals"],
++    deps = [
++        ":severity",
++        "//absl/base:atomic_hook",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
++        "//absl/hash",
++        "//absl/strings",
++    ],
++)
++
++cc_library(
++    name = "entry",
++    linkopts = ["-labsl_log_entry"],
++    deps = [
++        ":internal_config",
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++        "//absl/time",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "severity",
++    linkopts = ["-labsl_log_severity"],
++    deps = ["//absl/base:core_headers"],
++)
++
++cc_library(
++    name = "sink",
++    linkopts = ["-labsl_log_sink"],
++    deps = [
++        ":entry",
++        "//absl/base:config",
++    ],
++)
++
++cc_library(
++    name = "sink_registry",
++    deps = [
++        ":internal_log_sink_set",
++        ":sink",
++        "//absl/base:config",
++    ],
++)
++
++cc_library(
++    name = "internal_strip",
++    deps = [
++        ":internal_message",
++        ":internal_nullstream",
++        ":severity",
++    ],
++)
++
++cc_library(
++    name = "internal_nullstream",
++    deps = [
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++    ],
++)
++
++cc_library(
++    name = "check",
++    deps = [
++        ":internal_check_impl",
++        ":internal_check_op",
++        ":internal_conditions",
++        ":internal_message",
++        ":internal_strip",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_check_impl",
++    deps = [
++        ":internal_check_op",
++        ":internal_conditions",
++        ":internal_message",
++        ":internal_strip",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_check_op",
++    linkopts = ["-labsl_log_internal_check_op"],
++    deps = [
++        ":internal_nullguard",
++        ":internal_nullstream",
++        ":internal_strip",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++    ],
++)
++
++cc_library(
++    name = "absl_check",
++    deps = [":internal_check_impl"],
++)
++
++cc_library(
++    name = "absl_log",
++    deps = [":internal_log_impl"],
++)
+diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl
+index 07f49cebb78..a7f4e5ffc44 100644
+--- a/third_party/absl/workspace.bzl
++++ b/third_party/absl/workspace.bzl
+@@ -20,6 +20,7 @@ def repo():
+         "flags",
+         "functional",
+         "hash",
++        "log",
+         "memory",
+         "meta",
+         "numeric",
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch
new file mode 100644
index 000000000000..d6c135cc706d
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch
@@ -0,0 +1,25 @@
+From 0264617528e53e6b9c8f298ec9bec4a064ffdf27 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Wed, 29 Nov 2023 13:35:24 +0800
+Subject: [PATCH 11/12] systemlibs: update targets for google_cloud_cpp
+
+---
+ third_party/systemlibs/google_cloud_cpp.BUILD | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/third_party/systemlibs/google_cloud_cpp.BUILD b/third_party/systemlibs/google_cloud_cpp.BUILD
+index cbe6e10ba5b..fce306a22f1 100644
+--- a/third_party/systemlibs/google_cloud_cpp.BUILD
++++ b/third_party/systemlibs/google_cloud_cpp.BUILD
+@@ -4,3 +4,8 @@ filegroup(
+     name = "LICENSE",
+     visibility = ["//visibility:public"],
+ )
++
++cc_library(
++    name = "storage_client",
++    visibility = ["//visibility:public"],
++)
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch
new file mode 100644
index 000000000000..e564fdbfd185
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch
@@ -0,0 +1,29 @@
+From 62c2fc13f69f6ae6a3315f59430018898b37b74f Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Wed, 29 Nov 2023 19:14:28 +0800
+Subject: [PATCH 12/12] bump cudnn frontend to v0.9
+
+---
+ tensorflow/workspace2.bzl | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/tensorflow/workspace2.bzl b/tensorflow/workspace2.bzl
+index da9295adaba..86a3df2fe37 100644
+--- a/tensorflow/workspace2.bzl
++++ b/tensorflow/workspace2.bzl
+@@ -174,9 +174,9 @@ def _tf_repositories():
+         name = "cudnn_frontend_archive",
+         build_file = "//third_party:cudnn_frontend.BUILD",
+         patch_file = ["//third_party:cudnn_frontend_header_fix.patch"],
+-        sha256 = "bfcf778030831f325cfc13ae5995388cc834fbff2995a297ba580d9ec65ca3b6",
+-        strip_prefix = "cudnn-frontend-0.8",
+-        urls = tf_mirror_urls("https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.8.zip"),
++        sha256 = "d8dba9e2607a0c256aa8eacb45b39986ab6f3f24a4d431d4397047a3cb0cd4fb",
++        strip_prefix = "cudnn-frontend-0.9",
++        urls = tf_mirror_urls("https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip"),
+     )
+ 
+     tf_http_archive(
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/tensorflow-2.13.1.ebuild b/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
new file mode 100644
index 000000000000..646b0571695d
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
@@ -0,0 +1,453 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_OPTIONAL=1
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+DEP_VER="$(ver_cut 1-2)"
+
+inherit bazel check-reqs cuda distutils-r1 flag-o-matic prefix toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+RESTRICT="test" # Tests need GPU access
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda mpi +python xla"
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+	IUSE+=" cpu_flags_x86_${i}"
+done
+
+# distfiles that bazel uses for the workspace; these will be copied to bazel-distdir
+# pkgcheck complains, but do NOT change the .zip to .tar.gz: bazel requires the exact tarball (basename and sha256).
+# the build will fail if different archives are used.
+bazel_external_uris="
+	https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip
+	https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip
+	https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip
+	https://github.com/bazelbuild/apple_support/releases/download/1.1.0/apple_support.1.1.0.tar.gz
+	https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz
+	https://github.com/bazelbuild/bazel-toolchains/archive/8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz -> bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz
+	https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz -> bazelbuild-platforms-0.0.6.tar.gz
+	https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip
+	https://github.com/bazelbuild/rules_apple/releases/download/1.0.1/rules_apple.1.0.1.tar.gz
+	https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz
+	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
+	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
+	https://github.com/bazelbuild/rules_foreign_cc/archive/0.7.1.tar.gz -> bazelbuild-rules_foreign_cc-0.7.1.tar.gz
+	https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip
+	https://github.com/bazelbuild/rules_java/releases/download/5.5.1/rules_java-5.5.1.tar.gz -> bazelbuild-rules_java-5.5.1.tar.gz
+	https://github.com/bazelbuild/rules_jvm_external/archive/4.3.zip -> bazelbuild-rules_jvm_external-4.3.zip
+	https://github.com/bazelbuild/rules_pkg/releases/download/0.7.1/rules_pkg-0.7.1.tar.gz -> bazelbuild-rules_pkg-0.7.1.tar.gz
+	https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz
+	https://github.com/bazelbuild/rules_python/releases/download/0.0.1/rules_python-0.0.1.tar.gz -> bazelbuild-rules_python-0.0.1.tar.gz
+	https://github.com/bazelbuild/rules_swift/releases/download/1.0.0/rules_swift.1.0.0.tar.gz -> bazelbuild-rules_swift.1.0.0.tar.gz
+	https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz
+	https://github.com/google/XNNPACK/archive/b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip -> XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip
+	https://github.com/google/benchmark/archive/f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz -> benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz
+	https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz
+	https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip
+	https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz
+	https://github.com/google/re2/archive/a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz -> re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz
+	https://github.com/google/ruy/archive/3286a34cc8de6149ac6844107dfdffac91531e72.zip -> ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip
+	https://github.com/googleapis/googleapis/archive/6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz -> googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz
+	https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz
+	https://github.com/llvm/llvm-project/archive/dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz -> llvm-project-dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz
+	https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz
+	https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz
+	https://github.com/oneapi-src/oneDNN/archive/refs/tags/v2.7.3.tar.gz -> oneDNN-v2.7.3.tar.gz
+	https://github.com/oneapi-src/oneDNN/archive/refs/tags/v3.1.tar.gz -> oneDNN-v3.1.tar.gz
+	https://github.com/openxla/stablehlo/archive/43d81c6883ade82052920bd367c61f9e52f09954.zip -> openxla-stablehlo-43d81c6883ade82052920bd367c61f9e52f09954.zip
+	https://github.com/openxla/triton/archive/1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz -> openxla-triton-1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz
+	https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz
+	https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip -> protobuf-3.21.9.zip
+	https://github.com/pybind/pybind11_abseil/archive/2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz -> pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz
+	https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz -> pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz
+	https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip -> pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip
+	https://github.com/pytorch/cpuinfo/archive/3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz -> pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz
+	https://github.com/pytorch/cpuinfo/archive/3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip -> pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip
+	https://github.com/tensorflow/runtime/archive/7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz -> tensorflow-runtime-7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz
+	https://gitlab.com/libeigen/eigen/-/archive/b0f877f8e01e90a5b0f3a79d46ea234899f8b499/eigen-b0f877f8e01e90a5b0f3a79d46ea234899f8b499.tar.gz
+	cuda? (
+		https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip -> cudnn-frontend-v0.9.zip
+		https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip
+		https://github.com/nvidia/nccl/archive/v2.16.5-1.tar.gz -> nvidia-nccl-v2.16.5-1.tar.gz
+	)
+	python? (
+		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/a15b489e1222b2087007546b4912e21293ea86ff.tar.gz -> ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz
+		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
+	)"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+		${bazel_external_uris}"
+
+# absl/log/check.h is needed in tensorflow-2.13.1, see
+# https://github.com/tensorflow/tensorflow/blob/v2.13.1/tensorflow/compiler/jit/xla_compile_on_demand_op.cc
+# which requires abseil-cpp>=20230125, see
+# https://github.com/abseil/abseil-cpp/commit/92fdbfb301f8b301b28ab5c99e7361e775c2fb8a
+
+# abseil-cpp needs to be compiled with C++17
+# abseil-cpp>=20230125.3 in the repo is built with C++14
+
+# check flatbuffers version in tensorflow/lite/schema/schema_generated.h
+
+# BDEPEND: >=dev-libs/protobuf-3.8.0
+		#>=dev-cpp/abseil-cpp-20230125.0:=
+RDEPEND="
+	app-arch/snappy
+	=dev-cpp/abseil-cpp-20230125.2*:=
+	dev-db/sqlite
+	dev-libs/double-conversion
+	dev-libs/icu:=
+	>=dev-libs/jsoncpp-1.9.2:=
+	>=dev-libs/nsync-1.25.0
+	dev-libs/openssl:0=
+	>=dev-libs/protobuf-3.13.0:=
+	>=dev-libs/re2-0.2019.06.01:=
+	media-libs/giflib
+	media-libs/libjpeg-turbo
+	media-libs/libpng:0
+	>=net-libs/grpc-1.28:=
+	net-misc/curl
+	sys-libs/zlib
+	>=sys-apps/hwloc-2:=
+	cuda? (
+		dev-util/nvidia-cuda-toolkit:=[profiler]
+		=dev-libs/cudnn-8*
+	)
+	mpi? ( virtual/mpi )
+	python? (
+		${PYTHON_DEPS}
+		~dev-libs/flatbuffers-23.1.21:=
+		dev-python/absl-py[${PYTHON_USEDEP}]
+		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
+		dev-python/astunparse[${PYTHON_USEDEP}]
+		dev-python/clang-python[${PYTHON_USEDEP}]
+		dev-python/dill[${PYTHON_USEDEP}]
+		~dev-python/flatbuffers-23.1.21[${PYTHON_USEDEP}]
+		>=dev-python/gast-0.3.3[${PYTHON_USEDEP}]
+		dev-python/h5py[${PYTHON_USEDEP}]
+		>=dev-python/numpy-1.19[${PYTHON_USEDEP}]
+		>=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}]
+		>=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}]
+		>=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+		dev-python/pybind11[${PYTHON_USEDEP}]
+		dev-python/six[${PYTHON_USEDEP}]
+		dev-python/tblib[${PYTHON_USEDEP}]
+		dev-python/termcolor[${PYTHON_USEDEP}]
+		dev-python/typing-extensions[${PYTHON_USEDEP}]
+		>=dev-python/grpcio-1.28[${PYTHON_USEDEP}]
+		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
+		>=net-libs/google-cloud-cpp-0.10.0
+		=sci-visualization/tensorboard-${DEP_VER}*[${PYTHON_USEDEP}]
+	)"
+DEPEND="${RDEPEND}
+	python? (
+		dev-python/mock
+		dev-python/setuptools
+	)"
+PDEPEND="python? (
+		=sci-libs/keras-${DEP_VER}*[${PYTHON_USEDEP}]
+		=sci-libs/tensorflow-estimator-${DEP_VER}*[${PYTHON_USEDEP}]
+	)"
+BDEPEND="
+	app-arch/unzip
+	=dev-build/bazel-5*
+	dev-java/java-config
+	cuda? (
+		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+	)
+	!python? ( dev-lang/python )
+	python? (
+		dev-python/cython
+		dev-python/mock
+		>=dev-python/grpcio-tools-1.28
+	)
+	dev-util/patchelf"
+REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+CHECKREQS_MEMORY="5G"
+CHECKREQS_DISK_BUILD="10G"
+
+PATCHES=(
+	"${FILESDIR}/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch"
+)
+
+get-cpu-flags() {
+	local i f=()
+	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
+	done
+	use cpu_flags_x86_fma3 && f+=( -mfma )
+	echo "${f[*]}"
+}
+
+pkg_setup() {
+	local num_pythons_enabled
+	num_pythons_enabled=0
+	count_impls() {
+		num_pythons_enabled=$((${num_pythons_enabled} + 1))
+	}
+	use python && python_foreach_impl count_impls
+
+	# 10G to build C/C++ libs, 6G per python impl
+	CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
+	check-reqs_pkg_setup
+}
+
+src_unpack() {
+	# Only unpack the main distfile
+	unpack "${P}.tar.gz"
+	bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+
+	append-flags $(get-cpu-flags)
+	append-cxxflags -std=c++17
+	export BUILD_CXXFLAGS+=" -std=c++17"
+	filter-flags '-fvtable-verify=@(std|preinit)'
+	bazel_setup_bazelrc
+
+	# Relax version checks in setup.py
+	sed -i "/^    '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
+	# sed -i "/config_googleapis/d" tensorflow/workspace0.bzl || die
+
+	# Prefixify hard-coded command locations
+	hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl
+
+	default
+	use python && python_copy_sources
+
+	use cuda && cuda_add_sandbox
+}
+
+src_configure() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_configure() {
+		export CC_OPT_FLAGS=" "
+		export TF_ENABLE_XLA=$(usex xla 1 0)
+		export TF_NEED_OPENCL_SYCL=0
+		export TF_NEED_OPENCL=0
+		export TF_NEED_COMPUTECPP=0
+		export TF_NEED_ROCM=0
+		export TF_NEED_MPI=$(usex mpi 1 0)
+		export TF_SET_ANDROID_WORKSPACE=0
+
+		if use python; then
+			export PYTHON_BIN_PATH="${PYTHON}"
+			export PYTHON_LIB_PATH="$(python_get_sitedir)"
+		else
+			export PYTHON_BIN_PATH="$(which python)"
+			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
+		fi
+
+		export TF_NEED_CUDA=$(usex cuda 1 0)
+		export TF_DOWNLOAD_CLANG=0
+		export TF_CUDA_CLANG=0
+		export TF_NEED_TENSORRT=0	# $(usex cuda 1 0)
+		if use cuda; then
+			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
+			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
+			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
+			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
+			einfo "Setting CUDA version: $TF_CUDA_VERSION"
+			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
+
+			if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then
+				ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"
+				ewarn "version is not supported by the currently installed CUDA. TensorFlow will"
+				ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."
+				ewarn "If the build fails with linker errors try rebuilding the relevant"
+				ewarn "dependencies using the same compiler version."
+			fi
+
+			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
+				ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
+				ewarn "These may not be optimal for your GPU."
+				ewarn ""
+				ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"
+				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
+				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
+				ewarn ""
+				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
+				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
+			fi
+		fi
+
+		# com_googlesource_code_re2 uses a weird branch that depends on absl; it doesn't work with released re2
+		# com_github_googleapis_googleapis
+		# com_google_protobuf is disabled due to https://github.com/tensorflow/tensorflow/issues/61593
+		local SYSLIBS=(
+			absl_py
+			astor_archive
+			astunparse_archive
+			boringssl
+			com_github_googlecloudplatform_google_cloud_cpp
+			com_github_grpc_grpc
+			com_google_absl
+			# com_google_protobuf
+			curl
+			cython
+			dill_archive
+			double_conversion
+			flatbuffers
+			functools32_archive
+			gast_archive
+			gif
+			hwloc
+			icu
+			jsoncpp_git
+			libjpeg_turbo
+			nasm
+			nsync
+			opt_einsum_archive
+			org_sqlite
+			pasta
+			png
+			pybind11
+			six_archive
+			snappy
+			tblib_archive
+			termcolor_archive
+			typing_extensions_archive
+			wrapt
+			zlib
+		)
+
+		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
+		export TF_IGNORE_MAX_BAZEL_VERSION=1
+
+		# This is not autoconf
+		./configure || die
+
+		echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die
+		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+		echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+		echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+
+		for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags)
+		do
+			echo "build --copt=\"${cflag}\"" >> .bazelrc || die
+			echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die
+		done
+	}
+	if use python; then
+		python_foreach_impl run_in_build_dir do_configure
+	else
+		do_configure
+	fi
+}
+
+src_compile() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	if use python; then
+		python_setup
+		BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	# fail early if any deps are missing
+	ebazel build -k --nobuild \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so \
+		//tensorflow:libtensorflow_cc.so \
+		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
+
+	ebazel build \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so
+	ebazel build //tensorflow:libtensorflow_cc.so
+	ebazel build //tensorflow:install_headers
+	ebazel shutdown
+
+	do_compile() {
+		ebazel build //tensorflow/tools/pip_package:build_pip_package
+		ebazel shutdown
+	}
+	BUILD_DIR="${S}"
+	cd "${BUILD_DIR}" || die
+	use python && python_foreach_impl run_in_build_dir do_compile
+}
+
+src_install() {
+	local i l
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_install() {
+		einfo "Installing ${EPYTHON} files"
+		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
+		mkdir -p "${srcdir}" || die
+		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+		cd "${srcdir}" || die
+		esetup.py install
+
+		# libtensorflow_framework.so and libtensorflow_cc.so are in /usr/lib already
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_cc.so* || die
+		python_optimize
+	}
+
+	if use python; then
+		python_foreach_impl run_in_build_dir do_install
+
+		# Symlink to python-exec scripts
+		for i in "${ED}"/usr/lib/python-exec/*/*; do
+			n="${i##*/}"
+			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
+		done
+
+		python_setup
+		local BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	einfo "Installing headers"
+	insinto /usr/include/${PN}/
+	doins -r bazel-bin/tensorflow/include/*
+
+	einfo "Installing libs"
+	# Generate pkg-config file
+	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
+	insinto /usr/$(get_libdir)/pkgconfig
+	doins ${PN}.pc ${PN}_cc.pc
+
+	for l in libtensorflow{,_framework,_cc}.so; do
+		patchelf --add-rpath '/opt/cuda/lib64' bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
+	done
+
+	einstalldocs
+
+	# Workaround for https://bugs.gentoo.org/831927
+	export MAKEOPTS="-j1"
+}
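
For CUDA builds, a minimal sketch of the configuration that the ewarn block in
src_configure above asks for (the compute capabilities and USE flags below are
example values only, not recommendations; adjust them to the installed GPU):

	# /etc/portage/make.conf
	# build CUDA kernels only for the compute capabilities of your GPU(s)
	TF_CUDA_COMPUTE_CAPABILITIES="7.5,8.6"

	# /etc/portage/package.use/tensorflow
	sci-libs/tensorflow cuda python xla

After setting these, re-emerge the package, e.g. emerge --ask sci-libs/tensorflow.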


^ permalink raw reply related	[flat|nested] 11+ messages in thread

* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2024-02-03 19:35 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2024-02-03 19:35 UTC (permalink / raw
  To: gentoo-commits

commit:     77b20ee55bb5699ecd496e3a68e27fb4d5399f03
Author:     wangjiezhe <wangjiezhe <AT> gmail <DOT> com>
AuthorDate: Sun Jan 21 08:07:44 2024 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Sat Feb  3 19:33:04 2024 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=77b20ee5

sci-libs/tensorflow: fixing build issue with gcc-13

Tested on tensorflow-2.12/2.13/2.14/2.15 with USE="mpi python xla -cuda"
and gcc-13.

Bug: https://bugs.gentoo.org/905673

Signed-off-by: wangjiezhe <wangjiezhe <AT> gmail.com>
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 ...xing-build-issue-with-Clang-16-and-GCC-13.patch | 39 ++++++++++++++++++++++
 ...4.1-0014-Fixing-build-issue-with-Clang-16.patch | 25 ++++++++++++++
 sci-libs/tensorflow/tensorflow-2.11.0.ebuild       |  1 +
 sci-libs/tensorflow/tensorflow-2.12.0.ebuild       |  1 +
 sci-libs/tensorflow/tensorflow-2.13.1.ebuild       |  1 +
 sci-libs/tensorflow/tensorflow-2.14.1.ebuild       |  1 +
 6 files changed, 68 insertions(+)

diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch
new file mode 100644
index 000000000000..9961d94cd48c
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch
@@ -0,0 +1,39 @@
+From 9081d1ccadb7fcd3e2dd01106e85003af2fb7975 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 21 Jan 2024 00:26:21 +0800
+Subject: [PATCH 13/13] Fixing build issue with Clang 16 and GCC 13
+
+---
+ tensorflow/lite/kernels/internal/spectrogram.cc | 2 ++
+ tensorflow/tsl/lib/io/cache.h                   | 2 ++
+ 2 files changed, 4 insertions(+)
+
+diff --git a/tensorflow/lite/kernels/internal/spectrogram.cc b/tensorflow/lite/kernels/internal/spectrogram.cc
+index a832962a38d..9b1f86ba717 100644
+--- a/tensorflow/lite/kernels/internal/spectrogram.cc
++++ b/tensorflow/lite/kernels/internal/spectrogram.cc
+@@ -18,6 +18,8 @@ limitations under the License.
+ #include <assert.h>
+ #include <math.h>
+ 
++#include <cstdint>
++
+ #include "third_party/fft2d/fft.h"
+ 
+ namespace tflite {
+diff --git a/tensorflow/tsl/lib/io/cache.h b/tensorflow/tsl/lib/io/cache.h
+index f894c5916d5..e49d09b7450 100644
+--- a/tensorflow/tsl/lib/io/cache.h
++++ b/tensorflow/tsl/lib/io/cache.h
+@@ -16,6 +16,8 @@ limitations under the License.
+ #ifndef TENSORFLOW_TSL_LIB_IO_CACHE_H_
+ #define TENSORFLOW_TSL_LIB_IO_CACHE_H_
+ 
++#include <cstdint>
++
+ #include "tensorflow/tsl/platform/stringpiece.h"
+ 
+ // A Cache is an interface that maps keys to values.  It has internal
+-- 
+2.43.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch
new file mode 100644
index 000000000000..379aa7385df1
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch
@@ -0,0 +1,25 @@
+From 9db05c2eb25372b3be4badb15aa67d19298aeaf4 Mon Sep 17 00:00:00 2001
+From: Ben Olson <matthew.olson@intel.com>
+Date: Tue, 8 Aug 2023 17:15:50 -0500
+Subject: [PATCH 14/14] Fixing build issue with Clang 16
+
+---
+ tensorflow/tsl/lib/io/cache.h | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/tensorflow/tsl/lib/io/cache.h b/tensorflow/tsl/lib/io/cache.h
+index f894c5916d5..e49d09b7450 100644
+--- a/tensorflow/tsl/lib/io/cache.h
++++ b/tensorflow/tsl/lib/io/cache.h
+@@ -16,6 +16,8 @@ limitations under the License.
+ #ifndef TENSORFLOW_TSL_LIB_IO_CACHE_H_
+ #define TENSORFLOW_TSL_LIB_IO_CACHE_H_
+ 
++#include <cstdint>
++
+ #include "tensorflow/tsl/platform/stringpiece.h"
+ 
+ // A Cache is an interface that maps keys to values.  It has internal
+-- 
+2.43.0
+

diff --git a/sci-libs/tensorflow/tensorflow-2.11.0.ebuild b/sci-libs/tensorflow/tensorflow-2.11.0.ebuild
index 5a44042dcde2..7307e3bc3c8d 100644
--- a/sci-libs/tensorflow/tensorflow-2.11.0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.11.0.ebuild
@@ -198,6 +198,7 @@ src_prepare() {
 	bazel_setup_bazelrc
 
 	eapply "${WORKDIR}"/patches/*.patch
+	eapply "${FILESDIR}/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch"
 
 	# Relax version checks in setup.py
 	sed -i "/^    '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die

diff --git a/sci-libs/tensorflow/tensorflow-2.12.0.ebuild b/sci-libs/tensorflow/tensorflow-2.12.0.ebuild
index b2050fa13bff..adc648f45814 100644
--- a/sci-libs/tensorflow/tensorflow-2.12.0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.12.0.ebuild
@@ -200,6 +200,7 @@ src_prepare() {
 	bazel_setup_bazelrc
 
 	eapply "${WORKDIR}"/patches/*.patch
+	eapply "${FILESDIR}/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch"
 
 	# Relax version checks in setup.py
 	sed -i "/^    '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die

diff --git a/sci-libs/tensorflow/tensorflow-2.13.1.ebuild b/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
index 646b0571695d..fb1e328f6e07 100644
--- a/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
@@ -191,6 +191,7 @@ PATCHES=(
 	"${FILESDIR}/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch"
 	"${FILESDIR}/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch"
 	"${FILESDIR}/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch"
+	"${FILESDIR}/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch"
 )
 
 get-cpu-flags() {

diff --git a/sci-libs/tensorflow/tensorflow-2.14.1.ebuild b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
index 5e4117a37653..83e63834a518 100644
--- a/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
@@ -185,6 +185,7 @@ PATCHES=(
 	"${FILESDIR}/${P}-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch"
 	"${FILESDIR}/${P}-0012-build-use-non-hermetic-python.patch"
 	"${FILESDIR}/${P}-0013-installation-remove-cp_local_config_python.patch"
+	"${FILESDIR}/${P}-0014-Fixing-build-issue-with-Clang-16.patch"
 )
 
 get-cpu-flags() {


^ permalink raw reply related	[flat|nested] 11+ messages in thread

* [gentoo-commits] repo/gentoo:master commit in: sci-libs/tensorflow/files/, sci-libs/tensorflow/
@ 2024-02-03 19:35 Jason Zaman
  0 siblings, 0 replies; 11+ messages in thread
From: Jason Zaman @ 2024-02-03 19:35 UTC (permalink / raw
  To: gentoo-commits

commit:     e4bdcb0793343eeba56db58ddecf73a0eea0e445
Author:     wangjiezhe <wangjiezhe <AT> gmail <DOT> com>
AuthorDate: Sat Jan 20 06:43:03 2024 +0000
Commit:     Jason Zaman <perfinion <AT> gentoo <DOT> org>
CommitDate: Sat Feb  3 19:32:55 2024 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=e4bdcb07

sci-libs/tensorflow: add 2.14.1

Signed-off-by: wangjiezhe <wangjiezhe <AT> gmail.com>
Signed-off-by: Jason Zaman <perfinion <AT> gentoo.org>

 sci-libs/tensorflow/Manifest                       |   14 +
 ...dd-rules-docker-http_archive-bazel-toolch.patch |   37 +
 ...emlib-Latest-absl-LTS-has-split-cord-libs.patch |   32 +
 ...Must-link-against-libm-for-round-and-log2.patch |   29 +
 ...ensorflow_cc-Add-systemlib-nsync-linkopts.patch |   35 +
 ...systemlib-Updates-for-Abseil-20220623-LTS.patch |   71 +
 ...0006-systemlib-Update-targets-for-absl_py.patch |   24 +
 ...temlib-Add-well_known_types_py_pb2-target.patch |   28 +
 ...-0008-Relax-setup.py-version-requirements.patch |   38 +
 ....1-0009-systemlib-update-targets-for-absl.patch |  365 +++
 ...010-systemlib-fix-missing-osx-in-pybind11.patch |   25 +
 ...temlib-fix-missing-LICENSE-in-flatbuffers.patch |   25 +
 ...2.14.1-0012-build-use-non-hermetic-python.patch | 2745 ++++++++++++++++++++
 ...nstallation-remove-cp_local_config_python.patch |   68 +
 sci-libs/tensorflow/tensorflow-2.14.1.ebuild       |  446 ++++
 15 files changed, 3982 insertions(+)

diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 6e541231fe2d..8d8e59ab8231 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -6,6 +6,7 @@ DIST XNNPACK-659147817805d17c7be2d60bd7bbca7e780f9c82.zip 20341886 BLAKE2B 9aa37
 DIST XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip 24168206 BLAKE2B ba6d612cb09823a655f065a76bd9b956a37664eade90aede2d30d9892f6bcfa2c6134f5332eb31247bea6b46e51f47250ae09a6810bde181c72f715550811f49 SHA512 29c844e758ec2f2913dc477866b016afc04679f16da5774069dabbc9373ed210d510c4b1205eb681de20669e49f34098f490340f5524139b079461589f41e7b0
 DIST XNNPACK-e8f74a9763aa36559980a0c2f37f587794995622.zip 18756888 BLAKE2B 0a1787166e8bbfda4aa6010075d92573112a21f3f9d3b1c13bc931fae6fa4cafb71685e4c57e86d7a662912bb6431c2d39a24378bf82361b50e5855d1b62f524 SHA512 a6802f0995742af0ca82de010cbd42da230b36cc884612d4ba2de20ba0ca56da6a11209bfb01ee1a5ddc31dc891a69438fa4836ec9d62d56e32c6aa144c6e7aa
 DIST apple_support.1.1.0.tar.gz 27105 BLAKE2B 6982ed0188760caeb6951dd28d211449d37a3192fa75e22f5ea86b599a5a92bf8efcfe5a549146533b725aa0fd41584c4d12db3fab41ffbcbca60f657e9590f5 SHA512 db291209ab9a54238b244e02abbca749a695ca3b9b9dc2e207227d3ea32f13144f3236fa921df4c6ba954850635db56584582d8916bdf4c90a2adc55dc90cd3a
+DIST apple_support.1.6.0.tar.gz 66375 BLAKE2B 7106e02676861b6ae4b0b42a12fb1fcde0470a99b49088beceabca87743430d6f691688aac5d4cf27e4c4e941781ee9d899fc7c3219095c00bbfe5b6eddafeb5 SHA512 e1d7a119d685fcfd9af1b9b48bb5685743af2d66e86d109575853172c1d9d7c1ce1beaa3fe65d21b55943d243506cdccadc724c603adc5566293b0c0846f874d
 DIST bazel-skylib-1.3.0.tar.gz 36103 BLAKE2B a58142b9d2a5da9f137705105aa735c8489519989ca7e633968114309f23074a56cd03b5fed70e284da63751d666904c2573940ad9a0feb2be689d695f0f07ae SHA512 ab3a0b465ebbfe07c139b92f1e8b2c0bcede66d6366d184891e3c0ccd6619164bc299777e7d7236cb463834b98426f6fb6890409e0ce94b75446dbd85854944f
 DIST bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz 80728 BLAKE2B 340a295d8998d01eba6bdd3a97efae869c5dde93dee9bd9266af8ad92a00a5c84cafbc6dd1f5d20f78dfdaa59f7585cefc7af4b87df712489db7f76bfa1d5210 SHA512 cf391a756d2520c65423f991bd0afdf3aed1785b91481c55c5d3182e54b137fc7922fd179e758af2868e11f9f10ce9903409015a2fb0f18e67d14a3b073c6d72
 DIST bazelbuild-platforms-0.0.5.tar.gz 5399 BLAKE2B d53aa2dbbd526d15aef24914053a5fa7c7402e1201d94a603c8148281a933292e4815f01aae7f7f166d5325f40b14d2810d6d8e03e0780329c25299c3d8ebffe SHA512 332d5954c349a7d0b801d6338bc42193a730e8ba6c75866ccef17c5053f8f29e1d185cd33a73fe5758e96e57de0c2631974f45d18bdd58c825f9a5fc231ad8b6
@@ -22,6 +23,7 @@ DIST bazelbuild-rules_pkg-0.7.0.tar.gz 76580 BLAKE2B 77574785070b45609d12aa2c2dd
 DIST bazelbuild-rules_pkg-0.7.1.tar.gz 77334 BLAKE2B fef99181792dac840724d0cfe4f1d71ae77e16e9da0b2f938752e6971b04264bfb7d731998998b5637da774b5e67adb68cc7eb3c4f38a3933ef62f949d56553d SHA512 5b47922e9b60bf82ded612bf023d66d2c6786cc81abe6bc1653aa93400e3497acc2d92d5ff90f9f4ff757143ea0d66c1f8c8eea4059142889f9eb0d9073d9a80
 DIST bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz 14304 BLAKE2B cdd23ef47d247f6d1b9fbaa49edbda7e1cd55ad2e3616f43ff8d21fa42888c1f36172683e47beb3f3678a2b252d9b6c82fd692711e3133862eade8b64da06ea1 SHA512 024021816b4999b62db41709e0e9354ffdc88beb61a081b061d934241f06b1aa5be3b74324cbec94d1871e60eb65209b2e6b4bb8ca4a101218eaf6196ec6a974
 DIST bazelbuild-rules_python-0.0.1.tar.gz 2302092 BLAKE2B 1db52eebf2461d779f764f2afdd070d1d0dd65eb2b83ccd98c2831da1784614ca281b114064729a9f257c64eceb62975aac8362d231c84f32abdf19aee7a1852 SHA512 40fa069a4482e2f83e29dc8e109652d14d187b2ec8efdcd36e98d117de93d66a938ed74999b42a2293fcb6eccc0a111cbbcf65c5c155579214bb1b96644280a5
+DIST bazelbuild-rules_python-0.1.0.tar.gz 2490176 BLAKE2B dfb4df19ba787c3cb2c2b0ab7115b9678b64ba03b61b60af0253031333aef2ac37942e425ff724e3e759e5198e8ff45b55866a20f7b497f5735adb9f8deb1e72 SHA512 b83b35f5b200f115d9d5e89b2c81745dd834155f52be0ad2972007d4654ae9438f24c7bea3c9122e6056924a69b348ec3c53d649e092dbe5ae8af3b2119bbc5e
 DIST bazelbuild-rules_swift.1.0.0.tar.gz 199181 BLAKE2B 8261cf061ab630cff5bd0bf55c0b62252d3c7cc34a368eef80c0d1e70534dc43b5596077754306e87ba3e5bbc4b77710ba4934ff748079b8e03e72143e15deab SHA512 9e4acdd0a168462b5b480aad72cda9b3b16aaaf86fdf367c4de80dfcc093cb80c74f2f2219190730be10471d07c94c4f9bf8756a938fb9aaee9d1a4d983c4761
 DIST benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz 204856 BLAKE2B a1601a38e71e08490f42e85e87675b2451082c531e2712f7e78ac0267c7fab0b6f1ac96fde34933d82380c61097a4467b277790657695fa51c97ac9504435959 SHA512 e4323f2e7b05566e7b634cc1ec18ae64c7cee1bf4ecdb2a3da97dec8393b1ef26580975e7823d4ee9d51d151db6456bc685717836eb3209574ada22a07451063
 DIST cub-1.9.9.zip 619595 BLAKE2B 265b797a906b03da886de88863236c9ab90daa31498ddf848fcaf5e5ee1342614ad9a41618120ca09cc4c0da3e96eeec5e20ca9d7ba3f9860c507f06d15e59e1 SHA512 8c9c0a3f66f8d518ec07f857b5625e006d52f28bade1c1478a0f37420e2f7586dc3ff029d551748a1802bb5544b16fde5388e8d5a45d61eec595201b9db7a30d
@@ -29,6 +31,7 @@ DIST cudnn-frontend-v0.7.1.zip 20112411 BLAKE2B 6f836f6b484e708d43833aef3ae52b93
 DIST cudnn-frontend-v0.7.3.zip 20124177 BLAKE2B 9c32d99d69d4c7bc96fd6189aa1c2cdfac6fa31dfe84beebaee0e791e7a27768864067159da4473f737612973388daf39c7770ad9c1270bed840221bb603fc4d SHA512 68f5dba9873b317d8239187b57e7b4306e9078e52ef0992e6f23982aa374eff6c2ef2232b6cfff8012f50d9105d6f61c84f7f7c9ab4139d4db451599f896e0b4
 DIST cudnn-frontend-v0.9.zip 20077185 BLAKE2B fcd9425be4c2ecc39db0fd92be355a7767b3d00cea990ff4b63ade3dff957f97a6e5fdb1e6f287f6473b2212a66e160940328062a70485c38d5619cf3cc2eb54 SHA512 f38fc912303f4f61ae76d3159ac51b105aba0f728e895b850a164480a729ec144bd8f99764db3e2203210dc114662aba4b4ffe0435d027c0cf620cb42a50df64
 DIST dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz 76170 BLAKE2B c28873deab233d73996137c254acecc4adb0a750cee675cfd0777ccdfa91ea704e338e7166705d47e775c45b46b152834268d89c0443a08c57b4b830bd07ac71 SHA512 e6a4fe9356b8f75f96e7f9960df40e227f8e5242e609f8cc8bf28e8161bd4f58e8c6de374d9cf216edf7e0e09ca502bc158d41c3058bc6e6e7b2bbfb9c5483ff
+DIST eigen-0b51f763cbbd0ed08168f88972724329f0375498.tar.gz 2848019 BLAKE2B 005e353f101ee9db0a794843b03169e35d0b74867b7c5638036b973ec2424539646f61e063c892de577e04c78e647c8cb11ede67134b7a8b0471286be7429dfc SHA512 9885feb9ae493bb8860799c69d795df199f3ab84afd2bed5b1e71fff221ccd22023b512b4ab763e1b0c684a93d20d5d202088246fc5ffe211f0995e3839ece55
 DIST eigen-3460f3558e7b469efb8a225894e21929c8c77629.tar.gz 2833459 BLAKE2B f624102a174d80860314f0e895f641fb86369a2df88ba2e2589da86e5ff0802b36c64be44211f6013997e0251d74bb28051e3e7edcc6cc43ab88448524db3152 SHA512 c015dae59e0df1f25b6caef5f3c724cfd91cd440df6c3eba1ee7c129b20caf9ec4010209cc5edb82ed6534503a697ba6ee49f64be0359de332ed023cdede05cf
 DIST eigen-3bb6a48d8c171cf20b5f8e48bfb4e424fbd4f79e.tar.gz 2810518 BLAKE2B 97c9221024f765e6899c676602ee2c850fae661dad613957cead4bce29fce8d9cbb1ac20b812b71c699feea75768be2da945fc39e9b9e9cd2e5b3c6bcf034c60 SHA512 de2c35d3ab859021dac9de35b83cb94e824814e436cd40e45ca2f5f7f9fefadac2b4222f05de9eb1b03d04a29c751f9da3a2b804e6c0fc97b4a5508f25b1e7d4
 DIST eigen-b0f877f8e01e90a5b0f3a79d46ea234899f8b499.tar.gz 2833536 BLAKE2B 04bb103b64fa8c81ed337c67f5484fb15097f03905a504b19ebeaad4b33ab75baf1e3a2e060c91f1974272f55998555cd16c3da9d8a54a725aef39da7d39dae0 SHA512 1b239db63199aa6f3f9c35198294aff5b89c817befe6f394d69d2f4d6c6e3d946fda32119592da0d7894ea8b4fff12a1c1b8c5eda2e52f7365dc5aedda11f90f
@@ -38,20 +41,25 @@ DIST googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz 6091152 BLAKE2B
 DIST highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz 160745 BLAKE2B f2ffe8f13b1a8346b5b29e719c0a57f34e1fa6320d0f2d0575e4e97e813ed1a8f55acfb44b57aba70ea7d372ade18aee5ef82a881ecf457f580ffc92d6528c7b SHA512 4d6a7e8e321df82c009a0849b8da097a551f6c35a49fef65b89e731075a6af624918c0e55be9fd3a5bf07c519ab09bdefed57e0e39d4df0e79e189c468939de7
 DIST kissfft-131.1.0.tar.gz 52383 BLAKE2B 74e6d2e7d132a311b31c28a468e13d9772a53f0ea0abed0e0f49d8db9c183fb0646f58fd38df3e797b8577285899daf6b80446b149ce2582bb828410656d96df SHA512 bd715868ce0e93a291a0592fb1f8b960e832fc64efe863755e52b67d5addff9bcb444a1bf2570d1914c52b41dad1023d0d86400f5ea30c9fb84cd6b4f7210708
 DIST llvm-project-10939d1d580b9d3c9c2f3539c6bdb39f408179c0.tar.gz 179559452 BLAKE2B ccdf998502aea7e87ba128560f5458b2a959d32a13f56dc795b8a0ed794c0348ca035ca601a34c0c288990f358dc5c3c01712b7458ebd15c48b1c41b4413fcd2 SHA512 36997be5da7caeaf949ae093b3ec1d81dda668087cc94de1fee9f5262a019f40fca61e652787e3f9498cd8b021b8ffc8002daef189ae0e15fda281ef6d56ecd7
+DIST llvm-project-668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz 195368050 BLAKE2B 4fe9c27ce35e579e36f1bdca7281206eeb530eeab00089b4c71834f7b47c96e8f951f3ff8477e2946a30c21cd4dfee5a9b485282e5f4f449a70ad7652f378a45 SHA512 6ef3796c8931503f8e7000087262feb011995c1df79f7a7776ab508e9fb37a7bf5bad471d3317d550142b68818a1b3a26d39e23214a3fff852de0c763cf05b2f
 DIST llvm-project-d8415b02a519f222ecf71b069c96cc85ac635de3.tar.gz 169045379 BLAKE2B fe25f9c889c5159fbc1d251640f65b3097b4260ec7b27d5133843502ee397995c1075fb10f1a6519c6f693757ab8fe0fe2b82bb96678ef4ec4086c09ce3c90c3 SHA512 546edd97778b4298d7bb645620010e00569813fab07b925a909db4cdd8feb9adc4898e488f7bb31e70587d7b4134820a3f49a98d4f87bcf1dcad9adf5eed7e4c
 DIST llvm-project-dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz 182909064 BLAKE2B ba2a2db104849d1b09115cc2decdbb2e5dc84c58b61074500ff728e29c2f380a0818a4e8df22f4a1552c04e243dd114862492d7f8df06132348034c500200e14 SHA512 4f51271b765a666b023547382f3f983453afbfc69b793336e381e335d6103978292e781f86fffe16cba8b6d6ea309b64e6d899570060c275779aa0a2b90948c7
 DIST llvmorg-10.0.1-openmp-10.0.1.src.tar.xz 955492 BLAKE2B 4197ecfb2e3498a95a2ba5923f2b4bdafbab645ddf2d3f1875d39752d3ab7304fb35bce918d2dc6e40e5ea809545ae0907d6bc0b94a3d68b8d96da48f5a62adc SHA512 5b6f6487feaabd2a18ef2bbb1a5f86bb567e264f9fdf56805cfdd3f65f36f21e7c1392ba5170fafb52a395fc1606b277233beba3df31dc2ab825ef8924e7a05a
+DIST ml_dtypes-5b9fc9ad978757654843f4a8d899715dbea30e88.tar.gz 66243 BLAKE2B 541ce280fff68b51eb9e7f9eaff7c03d0c095ed5b219d3ca3d61c31650a21a63bae6fd6a8efddaced6de4d2601d5a7c6924d300d120c30907ea5e239c00ec70d SHA512 78f7e25e37ea30b0dc0cfd0dec4e03d4e208cbf223c239fa386eec9f9912f1adea290eefcca7b15b73f2329c457b34fef4374fb1ad0f3cedb02b310e0eb9fdb6
 DIST nvidia-nccl-v2.13.4-1.tar.gz 287269 BLAKE2B 8719e26978225a9697101fb7998388c3179bb0af5c396602689242e5529492ad7a81a142e3a8492c9fa4c92adc251c58e67f97fee64a4fd1a046134ac7d737d7 SHA512 5f7077f69a0c1624a1c8ca3d2f503d8269751e26cb6ee63e1a165fb86877b62073ec4e182d939b9aacce4ee8bb8295a39d1b6d65ef3dc0fce795735341a13fc6
 DIST nvidia-nccl-v2.16.2-1.tar.gz 326883 BLAKE2B 86db7adc67ba311b72e7e013dbc2a04918c0746c1fb62079ccd3300691479e1f6e35e379d6ee4320e343666b68372c56607ae521f5ff2d7e59d5f4dc3b894097 SHA512 e6572c2e7adc03053048c0b1e5290ffaf6f294239d78038887582c847aa549e5e95c7970b943f1d0b8964c32b4cdee3785bf40886f274907b613f320e9de10d0
 DIST nvidia-nccl-v2.16.5-1.tar.gz 327261 BLAKE2B abeeb6a2d4b58647ecb17694d92f79e650d2f2ffbccf26682ab202e17a1b7d3c356fce26d9f6edffee0756d71887bba8a9d5c254ad433d3b4ae8babfe3294534 SHA512 fc13e83e2339921b732c02250e95614b21202c52461aa262489714af6d92117aa5c0647bb0dcc3394cd357d4b7e8a76fe4c3a3567ba4512c359f19e2ff41de4d
 DIST oneDNN-v2.7.1.tar.gz 6405831 BLAKE2B b43253f7bc1be0bca51746f06d825191ae544376b259662cbf8f567d8f39a6befde3c88a14744e053b851d2f89fb2600b999abef1acb585bc116d6fa0c95fe3f SHA512 062e97ac613d265627ec1f010aa1d101bf71c449c813187c26244c66c9e6b9b582a0a0a823a9391fa828f396051318fada8263ff64c4f4b4bb6ca1d7a08ea6e1
 DIST oneDNN-v2.7.3.tar.gz 6410473 BLAKE2B c6730100e0438d456eb4986f416ae2bd1f173a80c52d5090523af06790afae8ee17cc58ffa8ed7215cd0eff99191a925d8cdce9986c72ccb8ebffacedc434b18 SHA512 ad9450f8b701288fa1721f64d0cb33fc344e9fc4f84e517b3d52377189ffcd0c5b56156ef4a33ca3ffe2da886abcc7ac5b2a3407cc155bd4be2223751b84f7c9
 DIST oneDNN-v3.1.tar.gz 7556565 BLAKE2B db6865410e902778c0153c50cc1f16c12e358b360d7e865207a86489d42727e66945d422b8bfa52b04b5f2b34daf585f1472a031cd8810a36c6724a2779120c1 SHA512 2053157a3885618364a9da5ec738c9cc2dde15db1ce9737578565d25c4a15a65944db3bbd17780de2032cfa2329bea4cb0af24ee428c9c246bdfa07a5bdde30b
+DIST oneDNN-v3.2.1.tar.gz 9186820 BLAKE2B f85cb1b410c3f57e098106ca13939c8c93c396e012b15a63c4f728ba75138a6f371db5fd182a54711479beca8f215578ea52d9c3d07be36647f6befb6c16746a SHA512 115819dc47fce5ef8fc7403f88e141743b360bc33243c90740d1b3871849ac379930d23e3e1d62d0abaaa3af5d2cdbd4218b80aa1be1edb09d0d949f7532a559
 DIST openxla-stablehlo-43d81c6883ade82052920bd367c61f9e52f09954.zip 27954369 BLAKE2B 30dddfcf2102e344d82171d8fcb2df68a3c2dedfc349a3f248c060e591535127d7716e1bf10c5eef20369eb0d81a6cc0eb5350a6979adb8a164b7bda62d6c745 SHA512 2432e4256bfd2d92ba717895967d87e05bb0201a5086314b1de5fe9078bfea37c14245b88b720ec82f2906751ab344da0dab9f714a6fffe79a0423cf7659e5ac
 DIST openxla-stablehlo-51f005f0a8ff6e28f535adfec4de936cb4097aa4.zip 6902721 BLAKE2B ef9766377a38f816f5a6dc60f34d5300b2775bc282084e9f34c7a5ccc6104a0154d44f2c57aba081889de50fc141a6059255fca3f681322343e316289d6540d7 SHA512 ffe46e21be6f617b6ecbc7ef35e83d441256e429150af60451cf04c02085fb1a0b60a9687d8d60d6f1f9321e6f6a92f24749a3c1cf1ee694a8ffc0fcd13f64f4
+DIST openxla-stablehlo-9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip 17784280 BLAKE2B 99bd410d36d78c4dbefef46d7df137b0bf583cc9cb2d34832f3d95360237647a05511c904ce23030d23ce6d95c34af11c29085db9f775aa96a702c28cec1891d SHA512 b098c2ec986ffae14f916a62095561942a809f80d2159005fbaa08691532ae2a3383b11d8672907b116fcedcf21c88ad86f3f4911b666a66543eab16fae06d86
 DIST openxla-stablehlo-fdd47908468488cbbb386bb7fc723dc19321cb83.zip 301572 BLAKE2B c8242b3d9612fbdfa1c34ae5cb610aadd68464498e6cc84d48bcc38abb9e8636fa31b9a03b5a604a29cafe12a47e5b7be90c48d52fb6587bcd376d6307775197 SHA512 61b89d0dafe450ae95d770878385b5ed3cbb0109b79cf5e01304119f2f52255ccc83cedc542cfa5c342692945833b86606468224e67c6ff9dd6f7216b86adc7a
 DIST openxla-triton-1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz 985789 BLAKE2B ef3aa4d8033757d37a9ecde3a687a261c5ecde1200b30db1ae24cc5725c334f8662e741041be1e854ddb2cd5d9cb2b9d5f254d9219e8035c5f08df92b3ee9fab SHA512 67ae9075219e142ec3f3e5c3f05cff5fb15f3ef9605bd1c0dbb1e26940bf1e428a9c17b0d5f85c069844f6ea94b8a3ce94277bd4cd344238fbbdc3f56f91e08f
 DIST openxla-triton-2c3853269281da6742cf469a5ca5772947d271ce.tar.gz 459751 BLAKE2B 8b1b314fd1b6d8822a84cb9cacfd70e2c59784a76f879d75c910f376d38fbdccbc132ebab7f29c5bddde04afd7821c0322311d97c55fcfcc87580b82039a7efa SHA512 cedee3b982b93ae237a1e035ef2a9447aabc94ea4add63959b927670006b5cf546f064d5741ee9e731de31c343ed5869abe9c479d07360b07ef2b26f93081a6a
+DIST openxla-triton-cl546794996.tar.gz 948559 BLAKE2B 9c2ed46364b4986c39466803f14ec5618cab0cbc504f53909f74eabf7f6d5e5f4f6fcf1d19965f48b38f18dc99f26fc02ecc7275f05194b228e281988bbb4cea SHA512 680774ffb6cf291bb0f7bd851d4cb66d4e40d70ce2761441ac17595fb98fee6cb013fc5d4f8ca33d79f7b09f2e2924e50c027a09e7250d72767c59a119e56143
 DIST protobuf-3.21.9.zip 7156366 BLAKE2B 464ec84fd380d2d472cde5b7dd978c31ac8dc455934b7b1f7afe7dd836579ff74c1196d56dea2944fb41e5ef2f2e05683b889b54e4a3a11bb7cf87a0cd57f691 SHA512 311e0bcd3da54b8fb2df4de03a9b465cd56451018438e4d72054d78e70c37c70ee3c8a0378a177acb737ec863bdf9250d10bcbcdc0f97707c7edf97b9b37683b
 DIST pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip 61524 BLAKE2B 924419730bc6b94ec98a13db94f177b3b70d6c763158f43fb6f9e45605e73cfce238e6c996e2bf629dbb2a5af93ae99849ddc91174fc4664d702667f7423892d SHA512 d25262b47e39058d5aa8b8197c4984a5b941587a19079a2210f738c3af34ab8e8477731c88ca80c3f812a6a04635f80300124d93cc1e099352ef2aca04bdc3ae
 DIST pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz 28202 BLAKE2B 58a13d005367d938e9fc17da6435a8c2706722d0770c173cbfc433b7ea4de7e7d1b97653c5859cc8c436cccda3b8d21df906249a3a60ee4bba7cc6601abfaa59 SHA512 91befca03fa1b4f12446b84d5fe3514df6c9e352a19042f4f8856f6df306229f23b6ca2976a17ab22c8dd5afa223a44013e54a4348298c305a7688646129f0a4
@@ -60,8 +68,12 @@ DIST pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip 76830 BLAKE2
 DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz 3515639 BLAKE2B d4adc49ea1bcbfd7a7efb13cdfea6a1d9cf717b06209c03342f92a3d624de65bcdf2ce7aa7fa8bd5f95ad423447ee833fdea718e16f98037344df75de8bde943 SHA512 f4c0ce922cee0df62c15a335889bb59b5f70ad71448b42b9c1bfa9b5e77c4c4d5f1613f607f32fa9d6817d0d5f49c554e1378079a1cd66a9cd2492796d48c3c2
 DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip 3812878 BLAKE2B 30048677534192f1e288c69be5a0373844206cc4e209d48b92f5bf38da37003bdd5125b6588ec0f34acd855acd9cd16193725976ede644d3140fbbcf03d2d364 SHA512 963fa6c6948102d15cae1db90645e9cf85d1efc0fd541f75dfff7d6efe62fdd196085910cdb366be56b7e71f36df98edd211fc875aff6eb1e9962e0d62f43667
 DIST pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz 3512335 BLAKE2B 71c14866fde3846b5f375b39fe2f081a6c219b36fc0721640971f99c53ca77c8e7f8df83992d777af32a28c645d64f27dca838bd8e60835c5497e131467b22d0 SHA512 6a61f4574661a55771c2ec31bb0919a51d0bd8c770477b254a5c14dc5323716af275c7fe3abc5aa96720d7cc929559ca66f614265d3940e076b8db2fa15c8e36
+DIST pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.tar.gz 3516012 BLAKE2B 78845105e55397d3e948382bac22fff6a639c2d27c096176f29b9b29c6e1e2f78a8ffb44eddf5e20b4d08e2f5dbd7be949a6b55ffe0ca754a00794db8de540a3 SHA512 53b687196b8e46bb99469bbf37f8141c3ee89be78bab67accc63af849207a0234447304b7fa63fb44635add0ddab585df337130acb85fd7b026c0990e12a5840
+DIST pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.zip 3813501 BLAKE2B 7b4c54fd6a21b9845dce1f1834eb07613b165ca3fd8ac132bfb3a6964354af9910664f77601f7b839a8770036a1b2a2b21befe3a51d2e6c1e0c6400abbcc952a SHA512 1f697dd26b01bda1e21bebb948fdc2c224455910f55fba7327533b131c016f7cb51eb00804d6d765b37b4614c9093243898363482b3e37e427f83941b0c88f48
+DIST re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz 396457 BLAKE2B 33b90f851c4a4ba634bcb907e8f32d21f6792e0fb680d38034d02f469e499380b0b458ad75fa42f1ad931cda314f7fb0ba0798ba085e176e5f2e38a67c8e14d7 SHA512 d573150b7a6d8fa3e970f0e245501d7653944fd406f2dc4016c7126c9602be988ba18d0c8b0296684dd30f4a3b9207284db5df4ef5f05e305160b845209f23d0
 DIST re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz 306766 BLAKE2B 54162a22afe10b392e30864729b3b210194c0dbf7926cc3536dfe3afd43e0b8abf3d01b08e0feb71a8ade19cd497aea9e9b7b34eacb85e10cc7e1c5fd62a407d SHA512 9557830cea13485bd332ccdcdd3735ea63e6bb41f4cf943ecc24a1d79b75a58c5894cfe70c4a35a07a1b2b1f3d2ffa76fbeff168fda17c72b3436cb6213121dd
 DIST rules_apple.1.0.1.tar.gz 2198467 BLAKE2B 79178efe1acfa36af3f41b31610a5add8bd9c35529931415ab45cc1588c4fea477ddf0cd67c5d799de688db049fe2f3ce776c5e5da2e1dde1c329efc44d51ec0 SHA512 a24d880dd8174241b2808424fd3364f8b0ba9865109704981ad68f383d983bab9f8e8923942423e70b0a3af6951d16f5712647692a8ca3bef4347057f3536cc6
+DIST rules_apple.2.3.0.tar.gz 2222651 BLAKE2B a0a0e153eaa16be0ace362c1738b3f5ff87a3e76bd6fbf65c7d76e68d33b40d99e86045cf0bb104644fbf076024dcee4082303b823e12cbce6675202d93c29d5 SHA512 e8f6c164451a28ad70a9fdabfc2540a0165de1d23e2bdb7ec06ddf2e455f55cf82b8024be0e88153fca44433ae84144cc4054974737c8f6374fc88de37338394
 DIST ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip 381045 BLAKE2B 00cf2a009ff6cac8e3e8c3380c3fdb4fe9299614eba56bfbf0b2b8dd908ec2ec7d58b8185810899dd8bac80cc990d69a26e01eed8562f73c5fc08b8b3ad198e0 SHA512 4acb4dcee41788c3f2a65a4335f64d20057980289f231a401ea45c27dcd16bc2e48d0748d6ad35e77c3212104c54353193d4710260993ae8927dce24a6ef435e
 DIST ruy-841ea4172ba904fe3536789497f9565f2ef64129.zip 379664 BLAKE2B 82f54b4e7959ca2ff489cf0eaa7c01c5084b11174a43e2caa8f30dcd3951fb9552e513fa0488190fa73dde62719bfd8e4be59bd264fe316ec5b9852db2494ed2 SHA512 e10bed1901eb53cc0174d8723b67b7ff4f7021b5d94e8e7596879a9a625e77948f265d430b5c56f7789030874ba08bdb5263796212d9b60affd1a20694ec3317
 DIST sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz 2569603 BLAKE2B cd66dc8a0b4ad3ea586a79ef588c8d68e4f05b6ea89e6d8119b5ee338b9046c11a747ca57c2f80a3c90fab119c05e85f5965571c9e522ccb8a312b95804d1a36 SHA512 b819d050bb38246b503f1eb3d6e7d878d0f166f3602ae3c327749b1f6ddc2b1d6ac7f768b1f1d055b98b7b98812e4643a75bcebc9728c7f615d67ca739761b3e
@@ -69,8 +81,10 @@ DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932
 DIST tensorflow-2.11.0.tar.gz 67996769 BLAKE2B 539a8d7084280023c7b019a079aad6bdf0fa94b22276250a02913fb0ad496b4af2115276152b4f37101547404b616de58f209b9d1036e5d4dd9b0f7072a59ba9 SHA512 cda16db72a0ede72ac9f5e76c3a745ea9d72421fa40021303032f8fc3ac2755f64524f97a4629c18cf888f259027439b49ec921e0f5fd329a6ba060235a658d5
 DIST tensorflow-2.12.0.tar.gz 69710128 BLAKE2B 582b7b2717edd0ce41ecd74525fd38684d375cf1833c206cb53fa10ef964bb41ef8f29fa947a1f77e892bb68793d53c567bc9c4c9124dba94913f57ddcd3b1f9 SHA512 9273720b5be08e5d3dc76aafa4af6b27a2d50afd02b181e7632f3d70961995b2e0e5acb13e70c9b704ef475617c23d70047fbe74d5b63b156cf8f2fa8a856b84
 DIST tensorflow-2.13.1.tar.gz 70663056 BLAKE2B fcaff251ae3757f7e20deb07566485ca243f943ce118886edcfea3c41aff8baf83b98e5e3eea97c5243cb5db2e7418ec58687b3099dffd5c750395b554689343 SHA512 4f7bae717444a34704cb53466051a5708be13342a193be14914fbddd4790e327f3b6c37063a36d1c7835c50cf99500895aaffc646fdb9b421e6b873dfe4b2e46
+DIST tensorflow-2.14.1.tar.gz 72760001 BLAKE2B dd443c087bbe524b8b6dd6e9f08ec1c7bbc406e2ae7f854573fd29212004f927daaf8115a66f1c11c97da2e6f40a44ccb0e4f8e28455e1bf94872d630277e4bf SHA512 c5e9a176027a00b5efb1343bee000330f56229a1a8559db2fb9e2c9388afaf8420d69b6fd6e7b85811272c110245315935232a859e9fd4106b29b226780c447e
 DIST tensorflow-patches-2.11.0.tar.bz2 2977 BLAKE2B 53672704ccfc5291f7070421af9f7246d2f211689b18f35917d4d166ff5e9ddb623db4dd9dc8054e0f2262b162dd8c2216446c6ca5e2bf538872debf8eb8aec1 SHA512 866c6abb78934c1a645ab3172f93d81423e2023fa1e8688255ef0777e340d810a6889c838b841be765f0897f7a269c4d6cb52b8f59af114bf5b9e181b1348590
 DIST tensorflow-patches-2.12.0.tar.bz2 4194 BLAKE2B b61efaf0ade6ef88b5abb858a84b537e02ff9fcd032a2a7f68a6467e53511a50fff66ef7e1096f343a8909e165b1b76146cb6a8db8e1974eeecf2cbf0b6a71a0 SHA512 2f931fd4b995d33300d392f7dafd6dd23671772f733c28faed239d01e9b032967afb17cab50908fa38956e2cde479a13dfdc632e622d918fe55d281aa9b3dc4e
 DIST tensorflow-runtime-4ce3e4da2e21ae4dfcee9366415e55f408c884ec.tar.gz 15313054 BLAKE2B 316da579b93d83bca43d51198dc65dea12972d73f019a5b78fe53162966e022d21d4225ba4a7786d1a0f376550a1052c59858df04b958768962b88d64d3c5083 SHA512 ea490ebc8a5eef4a7ce6185c19e3b1305fd886c8145ef54387076f458bfec56a8a33452728206afa67001273920f6958317c8c4289e32ac6fea432e15a2502c5
+DIST tensorflow-runtime-769f5cc9b8732933140b09e8808d13614182b496.tar.gz 15183227 BLAKE2B 3c9a3f256db2cd4ff74318da2fc42dbe98669142cc0ea567ac29df4e2faea6e6bc55508f8ec555a88d19bc064123f80e9809affd64628dd9483adfa0dac41aca SHA512 d505278cc7b82f1b1b3c0588e654e64cd63824c920b0b3c93b778ec1f46f005d17e922ee24dde9cb78714f0a2b22c7038f73273d94c46360b7aca92cb5ad61a3
 DIST tensorflow-runtime-7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz 15228644 BLAKE2B e621ece4bbe3139661ef48c628459118eb2078151907630d6fde4086bd73f09af2ab0bb1c43ccf81d84230e3bb3be617e505f76c5d4333fee9adece58e4f4042 SHA512 f79f1e0a44a60cd064e21461380dfd5eb47a8912064f238da4ea94c8c8c94a680e438ff2b202bd0c81049e104293b5bbbcdfb604cf9ebecf6e6bf34d6782b0f5
 DIST tensorflow-runtime-91d765cad5599f9710973d3e34d4dc22583e2e79.tar.gz 15226589 BLAKE2B 5a00d0f884c86f85a25aba8f7d9eee509f35c114e9bfa24ce3effe9437bc549a4a7f013b03b515fbb4a217724a2e2abca96300fba64560b0d0e4fdb05fb9c3ac SHA512 b2fc8a240de1a13fade8628358f410b8f6e1bfde9d2cec3765b62d2ee7eb143c168687a23cb79c7aecd19a668d2d3be46fba361d16ad29b722fe19004aa3c5a2

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
new file mode 100644
index 000000000000..9e93b3d5b8f3
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
@@ -0,0 +1,37 @@
+From 0d3f532325cc39eb816e94e5bae259ea5a5a2304 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:34:44 +0800
+Subject: [PATCH 01/13] WORKSPACE: add rules-docker http_archive,
+ bazel-toolchains uses git_repo
+
+git_repository() rules cannot pull from --distdir and fail when building
+without internet access. Use http_archive instead and pin the sha256
+hash as well.
+---
+ WORKSPACE | 11 +++++++++++
+ 1 file changed, 11 insertions(+)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index fb3af8a2bea..644b731b1dc 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,16 @@
+ workspace(name = "org_tensorflow")
+ 
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
++
++http_archive(
++    name = "io_bazel_rules_docker",
++    sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
++    strip_prefix = "rules_docker-0.10.0",
++    urls = [
++        "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
++    ],
++)
++
+ # We must initialize hermetic python first.
+ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+ 
+-- 
+2.41.0
+
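For context: the same conversion works for any Bazel dependency declared with git_repository(), which bazel cannot serve from --distdir. Replacing it with an http_archive() over the commit tarball, with the sha256 pinned, lets offline builds find the file in the distdir. A purely illustrative sketch (repository name, URL and hash are placeholders, not from this commit):

    # Before: fetched over git, invisible to --distdir
    # git_repository(
    #     name = "some_dep",
    #     remote = "https://github.com/example/some_dep.git",
    #     commit = "0123456789abcdef0123456789abcdef01234567",
    # )

    # After: a plain tarball that bazel can look up in --distdir by
    # filename, pinned by its sha256 so the fetch stays reproducible.
    http_archive(
        name = "some_dep",
        sha256 = "<sha256 of the tarball>",
        strip_prefix = "some_dep-0123456789abcdef0123456789abcdef01234567",
        urls = ["https://github.com/example/some_dep/archive/0123456789abcdef0123456789abcdef01234567.tar.gz"],
    )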

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
new file mode 100644
index 000000000000..5436744e1275
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
@@ -0,0 +1,32 @@
+From 33b11df0767ead9a64a65e3ae19e329bba91dd75 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sun, 6 Feb 2022 00:13:56 -0800
+Subject: [PATCH 02/13] systemlib: Latest absl LTS has split cord libs
+
+---
+ third_party/absl/system.absl.strings.BUILD | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD
+index fa9a7a84f67..63bac99d71b 100644
+--- a/third_party/absl/system.absl.strings.BUILD
++++ b/third_party/absl/system.absl.strings.BUILD
+@@ -26,7 +26,14 @@ cc_library(
+ 
+ cc_library(
+     name = "cord",
+-    linkopts = ["-labsl_cord"],
++    linkopts = [
++        "-labsl_cord",
++        "-labsl_cord_internal",
++        "-labsl_cordz_functions",
++        "-labsl_cordz_handle",
++        "-labsl_cordz_info",
++        "-labsl_cordz_sample_token",
++    ],
+     deps = [
+         ":str_format",
+         "//absl/container:compressed_tuple",
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
new file mode 100644
index 000000000000..8cff4a422ee3
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
@@ -0,0 +1,29 @@
+From e098854ed15caa864b83033a1bc6b1aa7ca93a5c Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 5 Sep 2022 12:52:44 -0700
+Subject: [PATCH 03/13] mkl_dnn: Must link against libm for round and log2
+
+---
+ third_party/mkl_dnn/mkldnn_v1.BUILD | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/third_party/mkl_dnn/mkldnn_v1.BUILD b/third_party/mkl_dnn/mkldnn_v1.BUILD
+index 263c64eb681..f1860b1e7c3 100644
+--- a/third_party/mkl_dnn/mkldnn_v1.BUILD
++++ b/third_party/mkl_dnn/mkldnn_v1.BUILD
+@@ -165,9 +165,9 @@ cc_library(
+     includes = _INCLUDES_LIST,
+     # TODO(penpornk): Use lrt_if_needed from tensorflow.bzl instead.
+     linkopts = select({
+-        "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt"],
+-        "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt"],
+-        "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt"],
++        "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt", "-lm"],
++        "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt", "-lm"],
++        "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt", "-lm"],
+         "//conditions:default": [],
+     }),
+     textual_hdrs = _TEXTUAL_HDRS_LIST,
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
new file mode 100644
index 000000000000..0fa4d02d4c62
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
@@ -0,0 +1,35 @@
+From e6645115b8a838b40a49c73cb948dc373c5e98c8 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:42:48 +0800
+Subject: [PATCH 04/13] tensorflow_cc: Add systemlib nsync linkopts
+
+Linkopts don't get propagated up to the shared library correctly, so
+work around this by applying them directly.
+---
+ tensorflow/BUILD | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index 202553cd531..63ce1e7b385 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -39,6 +39,7 @@ load(
+     "tf_cc_shared_library",
+ )
+ load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
++load("@local_config_syslibs//:build_defs.bzl", "if_system_lib")
+ 
+ # copybara:uncomment_begin
+ # load("//devtools/copybara/rules:copybara.bzl", "copybara_config_test")
+@@ -1312,7 +1313,7 @@ tf_cc_shared_library(
+             "-z defs",
+             "-Wl,--version-script,$(location //tensorflow:tf_version_script.lds)",
+         ],
+-    }),
++    }) + if_system_lib("nsync", ["-lnsync_cpp"]),
+     per_os_targets = True,
+     roots = [
+         "//tensorflow/c:c_api",
+-- 
+2.41.0
+
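The if_system_lib() helper loaded above comes from the build_defs.bzl generated by TensorFlow's syslibs configuration; conceptually it returns its second argument only when the named library appears in TF_SYSTEM_LIBS. A rough behavioural sketch, not the literal generated code:

    # Filled in by the syslibs configure step from TF_SYSTEM_LIBS; values illustrative.
    SYSTEM_LIBS_ENABLED = ["nsync", "zlib"]

    def if_system_lib(lib, if_true, if_false = []):
        # Use if_true only when the system copy of `lib` is selected.
        if lib in SYSTEM_LIBS_ENABLED:
            return if_true
        return if_false

    # With the hunk above, "-lnsync_cpp" is therefore appended to the shared
    # library's linkopts only on system-nsync builds.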

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
new file mode 100644
index 000000000000..7dadd35bc2b7
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
@@ -0,0 +1,71 @@
+From c390554addb171439310c00dce2972539ac0e71d Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:10:03 -0700
+Subject: [PATCH 05/13] systemlib: Updates for Abseil 20220623 LTS
+
+These targets are header-only and just need stub bazel targets
+---
+ third_party/absl/system.absl.functional.BUILD | 22 +++++++++++++++++++
+ third_party/absl/system.absl.random.BUILD     | 12 ++++++++++
+ 2 files changed, 34 insertions(+)
+
+diff --git a/third_party/absl/system.absl.functional.BUILD b/third_party/absl/system.absl.functional.BUILD
+index a4f70acf35c..579181dec07 100644
+--- a/third_party/absl/system.absl.functional.BUILD
++++ b/third_party/absl/system.absl.functional.BUILD
+@@ -2,10 +2,32 @@ load("@rules_cc//cc:defs.bzl", "cc_library")
+ 
+ package(default_visibility = ["//visibility:public"])
+ 
++cc_library(
++    name = "any_invocable",
++    deps = [
++        "//absl/base:base_internal",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/meta:type_traits",
++        "//absl/utility",
++    ],
++)
++
+ cc_library(
+     name = "bind_front",
++    deps = [
++        "//absl/base:base_internal",
++        "//absl/container:compressed_tuple",
++        "//absl/meta:type_traits",
++        "//absl/utility",
++    ],
+ )
+ 
+ cc_library(
+     name = "function_ref",
++    deps = [
++        "//absl/base:base_internal",
++        "//absl/base:core_headers",
++        "//absl/meta:type_traits",
++    ],
+ )
+diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD
+index 948de07751a..5ebd656be8e 100644
+--- a/third_party/absl/system.absl.random.BUILD
++++ b/third_party/absl/system.absl.random.BUILD
+@@ -51,3 +51,15 @@ cc_library(
+         "//absl/types:span",
+     ],
+ )
++
++cc_library(
++    name = "bit_gen_ref",
++    deps = [
++        ":random",
++        "//absl/base:core_headers",
++        "//absl/base:fast_type_id",
++        "//absl/meta:type_traits",
++        "//absl/random/internal:distribution_caller",
++        "//absl/random/internal:fast_uniform_bits",
++    ],
++)
+-- 
+2.41.0
+
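Put differently: in these system.absl BUILD stubs, a component that ships a compiled libabsl_*.so carries a matching linkopts entry, while a header-only component only needs a cc_library that forwards its deps. Schematic example (target names are illustrative, not from the real BUILD files):

    # Header-only: nothing to link, just propagate the include dependencies.
    cc_library(
        name = "some_header_only_target",
        deps = ["//absl/base:config"],
    )

    # Compiled: additionally link the installed shared library.
    cc_library(
        name = "some_compiled_target",
        linkopts = ["-labsl_some_compiled_target"],
        deps = ["//absl/base:config"],
    )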

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch
new file mode 100644
index 000000000000..fa021358998c
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch
@@ -0,0 +1,24 @@
+From d2dc4d308a83cb2d1620e7c5213ec570fe3138af Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:48:15 +0800
+Subject: [PATCH 06/13] systemlib: Update targets for absl_py
+
+---
+ third_party/systemlibs/absl_py.absl.flags.BUILD | 3 +++
+ 1 file changed, 3 insertions(+)
+
+diff --git a/third_party/systemlibs/absl_py.absl.flags.BUILD b/third_party/systemlibs/absl_py.absl.flags.BUILD
+index d92f4949df1..614938fb8c4 100644
+--- a/third_party/systemlibs/absl_py.absl.flags.BUILD
++++ b/third_party/systemlibs/absl_py.absl.flags.BUILD
+@@ -8,4 +8,7 @@ py_library(
+ 
+ py_library(
+     name = "argparse_flags",
++    deps = [
++        ":flags",
++    ],
+ )
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch
new file mode 100644
index 000000000000..655be6bc919f
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch
@@ -0,0 +1,28 @@
+From e58f5674af07a3853e59c32b92d91e590b0224e2 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sat, 3 Jun 2023 16:23:51 -0700
+Subject: [PATCH 07/13] systemlib: Add well_known_types_py_pb2 target
+
+Bug: https://github.com/tensorflow/tensorflow/issues/60667
+---
+ third_party/systemlibs/protobuf.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD
+index 4d05ab28d12..b3d72b0e3ad 100644
+--- a/third_party/systemlibs/protobuf.BUILD
++++ b/third_party/systemlibs/protobuf.BUILD
+@@ -111,3 +111,10 @@ py_library(
+     visibility = ["//visibility:public"],
+     deps = [dep + "_proto" for dep in proto[1][1]],
+ ) for proto in WELL_KNOWN_PROTO_MAP.items()]
++
++py_proto_library(
++    name = "well_known_types_py_pb2",
++    include = ".",
++    srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()],
++    visibility = ["//visibility:public"],
++)
+-- 
+2.41.0
+
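The srcs comprehension above relies on protobuf's WELL_KNOWN_PROTO_MAP, whose values (as the existing py_library rule in this file already assumes) are tuples of the .proto source path and its proto dependencies. An illustrative subset of what the map looks like:

    # value[0] is the proto source path, value[1] the list of proto deps.
    WELL_KNOWN_PROTO_MAP = {
        "any_proto": ("google/protobuf/any.proto", []),
        "duration_proto": ("google/protobuf/duration.proto", []),
    }

    # so [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()] expands to
    # ["google/protobuf/any.proto", "google/protobuf/duration.proto"].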

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch
new file mode 100644
index 000000000000..5d1667d75e11
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch
@@ -0,0 +1,38 @@
+From e6cecad5c2595cb1166a78b698377f12da6e7a09 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:54:18 +0800
+Subject: [PATCH 08/13] Relax setup.py version requirements
+
+---
+ tensorflow/tools/pip_package/setup.py | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
+index fdb718d1628..3897d5316ba 100644
+--- a/tensorflow/tools/pip_package/setup.py
++++ b/tensorflow/tools/pip_package/setup.py
+@@ -101,8 +101,8 @@ REQUIRED_PACKAGES = [
+     'six >= 1.12.0',
+     'termcolor >= 1.1.0',
+     'typing_extensions >= 3.6.6',
+-    'wrapt >= 1.11.0, < 1.15',
+-    'tensorflow-io-gcs-filesystem >= 0.23.1',
++    'wrapt >= 1.11.0',
++    # 'tensorflow-io-gcs-filesystem >= 0.23.1',
+     # grpcio does not build correctly on big-endian machines due to lack of
+     # BoringSSL support.
+     # See https://github.com/tensorflow/tensorflow/issues/17882.
+@@ -140,8 +140,8 @@ FAKE_REQUIRED_PACKAGES = [
+     _VERSION + ';platform_system=="Windows"',
+ ]
+ 
+-if platform.system() == 'Linux' and platform.machine() == 'x86_64':
+-  REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
++# if platform.system() == 'Linux' and platform.machine() == 'x86_64':
++#   REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
+ 
+ if collaborator_build:
+   # If this is a collaborator build, then build an "installer" wheel and
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch
new file mode 100644
index 000000000000..6b946461fba6
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch
@@ -0,0 +1,365 @@
+From 1a72b50ed5054cb025c0aa2a39ce2499417f2d76 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 13:12:20 +0800
+Subject: [PATCH 09/13] systemlib: update targets for absl
+
+---
+ .../compiler/mlir/tools/kernel_gen/BUILD      |   1 +
+ .../distribute/experimental/rpc/kernels/BUILD |   1 +
+ third_party/absl/system.absl.debugging.BUILD  |  20 +-
+ third_party/absl/system.absl.log.BUILD        | 271 ++++++++++++++++++
+ third_party/absl/workspace.bzl                |   1 +
+ 5 files changed, 289 insertions(+), 5 deletions(-)
+ create mode 100644 third_party/absl/system.absl.log.BUILD
+
+diff --git a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
+index 71d85d2c96e..f4a479a9daf 100644
+--- a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
++++ b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
+@@ -107,6 +107,7 @@ tf_cc_binary(
+         "//tensorflow/compiler/mlir:init_mlir",
+         "//tensorflow/compiler/mlir/tensorflow",
+         "//tensorflow/core:lib",
++        "@com_google_absl//absl/log:check",
+         "@com_google_absl//absl/strings",
+         "@llvm-project//llvm:AArch64CodeGen",  # fixdeps: keep
+         "@llvm-project//llvm:ARMCodeGen",  # fixdeps: keep
+diff --git a/tensorflow/distribute/experimental/rpc/kernels/BUILD b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+index f9a525364c5..8b7f7b54761 100644
+--- a/tensorflow/distribute/experimental/rpc/kernels/BUILD
++++ b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+@@ -65,6 +65,7 @@ tf_kernel_library(
+         "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_proto_cc",
+         "@com_github_grpc_grpc//:grpc++",
+         "@com_google_absl//absl/status",
++        "@com_google_absl//absl/log:check",
+         "@com_google_absl//absl/strings",
+         "@com_google_absl//absl/strings:str_format",
+     ],
+diff --git a/third_party/absl/system.absl.debugging.BUILD b/third_party/absl/system.absl.debugging.BUILD
+index 931ffdc9e92..223db7b4c46 100644
+--- a/third_party/absl/system.absl.debugging.BUILD
++++ b/third_party/absl/system.absl.debugging.BUILD
+@@ -26,15 +26,25 @@ cc_library(
+ 
+ cc_library(
+     name = "failure_signal_handler",
+-    linkopts = [
+-        "-labsl_failure_signal_handler",
+-        "-labsl_examine_stack",
++    linkopts = ["-labsl_failure_signal_handler"],
++    deps = [
++        ":examine_stack",
++        ":stacktrace",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
+     ],
++)
++
++cc_library(
++    name = "examine_stack",
++    linkopts = ["-labsl_examine_stack"],
+     deps = [
+         ":stacktrace",
+         ":symbolize",
+-        "//absl/base",
+-        "//absl/base:errno_saver",
++        "//absl/base:config",
++        "//absl/base:core_headers",
+         "//absl/base:raw_logging_internal",
+     ],
+ )
+diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD
+new file mode 100644
+index 00000000000..9a2a5de657e
+--- /dev/null
++++ b/third_party/absl/system.absl.log.BUILD
+@@ -0,0 +1,271 @@
++load("@rules_cc//cc:defs.bzl", "cc_library")
++
++package(default_visibility = ["//visibility:public"])
++
++cc_library(
++    name = "log",
++    deps = [
++        "//absl/log:internal_log_impl",
++    ],
++)
++
++cc_library(
++    name = "internal_log_impl",
++    deps = [
++        ":internal_conditions",
++        ":internal_message",
++        ":internal_strip",
++    ],
++)
++
++cc_library(
++    name = "internal_conditions",
++    linkopts = ["-labsl_log_internal_conditions"],
++    deps = [
++        ":internal_voidify",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_voidify",
++    deps = ["//absl/base:config"],
++)
++
++cc_library(
++    name = "internal_message",
++    linkopts = ["-labsl_log_internal_message"],
++    deps = [
++        ":entry",
++        ":globals",
++        ":internal_append_truncated",
++        ":internal_format",
++        ":internal_globals",
++        ":internal_log_sink_set",
++        ":internal_nullguard",
++        ":internal_proto",
++        ":severity",
++        ":sink",
++        ":sink_registry",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:errno_saver",
++        "//absl/base:raw_logging_internal",
++        "//absl/base:strerror",
++        "//absl/container:inlined_vector",
++        "//absl/debugging:examine_stack",
++        "//absl/memory",
++        "//absl/strings",
++        "//absl/time",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_append_truncated",
++    deps = [
++        "//absl/base:config",
++        "//absl/strings",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_format",
++    linkopts = ["-labsl_log_internal_format"],
++    deps = [
++        ":internal_append_truncated",
++        ":internal_config",
++        ":internal_globals",
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++        "//absl/strings:str_format",
++        "//absl/time",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_globals",
++    linkopts = ["-labsl_log_internal_globals"],
++    deps = [
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
++        "//absl/strings",
++        "//absl/time",
++    ],
++)
++
++cc_library(
++    name = "internal_proto",
++    linkopts = ["-labsl_log_internal_proto"],
++    deps = [
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_log_sink_set",
++    linkopts = ["-labsl_log_internal_log_sink_set"],
++    deps = [
++        ":entry",
++        ":globals",
++        ":internal_config",
++        ":internal_globals",
++        ":severity",
++        ":sink",
++        "//absl/base",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
++        "//absl/cleanup",
++        "//absl/strings",
++        "//absl/synchronization",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "internal_config",
++    deps = [
++        "//absl/base:config",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_nullguard",
++    linkopts = ["-labsl_log_internal_nullguard"],
++    deps = [
++        "//absl/base:config",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "globals",
++    linkopts = ["-labsl_log_globals"],
++    deps = [
++        ":severity",
++        "//absl/base:atomic_hook",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/base:raw_logging_internal",
++        "//absl/hash",
++        "//absl/strings",
++    ],
++)
++
++cc_library(
++    name = "entry",
++    linkopts = ["-labsl_log_entry"],
++    deps = [
++        ":internal_config",
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++        "//absl/time",
++        "//absl/types:span",
++    ],
++)
++
++cc_library(
++    name = "severity",
++    linkopts = ["-labsl_log_severity"],
++    deps = ["//absl/base:core_headers"],
++)
++
++cc_library(
++    name = "sink",
++    linkopts = ["-labsl_log_sink"],
++    deps = [
++        ":entry",
++        "//absl/base:config",
++    ],
++)
++
++cc_library(
++    name = "sink_registry",
++    deps = [
++        ":internal_log_sink_set",
++        ":sink",
++        "//absl/base:config",
++    ],
++)
++
++cc_library(
++    name = "internal_strip",
++    deps = [
++        ":internal_message",
++        ":internal_nullstream",
++        ":severity",
++    ],
++)
++
++cc_library(
++    name = "internal_nullstream",
++    deps = [
++        ":severity",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++    ],
++)
++
++cc_library(
++    name = "check",
++    deps = [
++        ":internal_check_impl",
++        ":internal_check_op",
++        ":internal_conditions",
++        ":internal_message",
++        ":internal_strip",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_check_impl",
++    deps = [
++        ":internal_check_op",
++        ":internal_conditions",
++        ":internal_message",
++        ":internal_strip",
++        "//absl/base:core_headers",
++    ],
++)
++
++cc_library(
++    name = "internal_check_op",
++    linkopts = ["-labsl_log_internal_check_op"],
++    deps = [
++        ":internal_nullguard",
++        ":internal_nullstream",
++        ":internal_strip",
++        "//absl/base:config",
++        "//absl/base:core_headers",
++        "//absl/strings",
++    ],
++)
++
++cc_library(
++    name = "absl_check",
++    deps = [":internal_check_impl"],
++)
++
++cc_library(
++    name = "absl_log",
++    deps = [":internal_log_impl"],
++)
+diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl
+index 07f49cebb78..a7f4e5ffc44 100644
+--- a/third_party/absl/workspace.bzl
++++ b/third_party/absl/workspace.bzl
+@@ -20,6 +20,7 @@ def repo():
+         "flags",
+         "functional",
+         "hash",
++        "log",
+         "memory",
+         "meta",
+         "numeric",
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch
new file mode 100644
index 000000000000..24b7cf4eec90
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch
@@ -0,0 +1,25 @@
+From ce5e7c9b7f0a667514a65dc58ca67b61fa591c8f Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:22:35 +0800
+Subject: [PATCH 10/13] systemlib: fix missing `:osx` in pybind11
+
+---
+ third_party/systemlibs/pybind11.BUILD | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/third_party/systemlibs/pybind11.BUILD b/third_party/systemlibs/pybind11.BUILD
+index 79a483d7b5d..cda63fbd019 100644
+--- a/third_party/systemlibs/pybind11.BUILD
++++ b/third_party/systemlibs/pybind11.BUILD
+@@ -6,3 +6,8 @@ cc_library(
+         "@org_tensorflow//third_party/python_runtime:headers",
+     ],
+ )
++
++config_setting(
++    name = "osx",
++    constraint_values = ["@platforms//os:osx"],
++)
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch
new file mode 100644
index 000000000000..acd46106115f
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch
@@ -0,0 +1,25 @@
+From 084723bca84ba51f7f67209618b5a4e064c1576a Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:24:24 +0800
+Subject: [PATCH 11/13] systemlib: fix missing `LICENSE` in flatbuffers
+
+---
+ third_party/flatbuffers/BUILD.system | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/third_party/flatbuffers/BUILD.system b/third_party/flatbuffers/BUILD.system
+index 8fe4d7a5907..b1d63b4ca0f 100644
+--- a/third_party/flatbuffers/BUILD.system
++++ b/third_party/flatbuffers/BUILD.system
+@@ -1,7 +1,7 @@
+ licenses(["notice"])  # Apache 2.0
+ 
+ filegroup(
+-    name = "LICENSE.txt",
++    name = "LICENSE",
+     visibility = ["//visibility:public"],
+ )
+ 
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch
new file mode 100644
index 000000000000..67108a290e13
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch
@@ -0,0 +1,2745 @@
+From 3f0e4685b47f71c80b18bc5b6cba1afd56070604 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:25:46 +0800
+Subject: [PATCH 12/13] build: use non-hermetic python
+
+---
+ WORKSPACE                                     |  65 --
+ tensorflow/BUILD                              |   2 -
+ tensorflow/compiler/mlir/glob_lit_test.bzl    |   1 -
+ tensorflow/compiler/xla/glob_lit_test.bzl     |   5 -
+ tensorflow/compiler/xla/mlir_hlo/tests/BUILD  |   1 -
+ tensorflow/dtensor/python/tests/BUILD         |   1 -
+ tensorflow/lite/python/BUILD                  |   1 -
+ tensorflow/python/BUILD                       |   1 -
+ tensorflow/python/compiler/tensorrt/BUILD     |   1 -
+ .../experimental/kernel_tests/service/BUILD   |   1 -
+ tensorflow/python/debug/lib/BUILD             |   1 -
+ .../python/distribute/experimental/rpc/BUILD  |   1 -
+ .../python/distribute/failure_handling/BUILD  |   1 -
+ tensorflow/python/eager/BUILD                 |   1 -
+ tensorflow/python/estimator/BUILD             |   5 +-
+ tensorflow/python/framework/BUILD             |   2 -
+ tensorflow/python/keras/BUILD                 |   1 -
+ tensorflow/python/keras/engine/BUILD          |   1 -
+ tensorflow/python/keras/saving/BUILD          |   1 -
+ tensorflow/python/profiler/BUILD              |   1 -
+ .../python/profiler/integration_test/BUILD    |   1 -
+ tensorflow/python/summary/BUILD               |   1 -
+ third_party/py/BUILD.tpl                      |  39 +-
+ third_party/py/{non_hermetic => }/README      |   0
+ third_party/py/non_hermetic/BUILD             |   0
+ third_party/py/non_hermetic/BUILD.tpl         |  80 --
+ third_party/py/non_hermetic/ml_dtypes/BUILD   |   0
+ third_party/py/non_hermetic/ml_dtypes/LICENSE | 202 ----
+ .../py/non_hermetic/ml_dtypes/ml_dtypes.BUILD |  50 -
+ .../ml_dtypes/ml_dtypes.tests.BUILD           |  60 --
+ .../py/non_hermetic/ml_dtypes/workspace.bzl   |  22 -
+ third_party/py/non_hermetic/numpy/BUILD       |  21 -
+ third_party/py/non_hermetic/numpy/README.md   |   4 -
+ .../py/non_hermetic/numpy/tf_numpy_api/BUILD  |  12 -
+ ...ensorflow.experimental.numpy.ndarray.pbtxt |  51 -
+ .../tensorflow.experimental.numpy.pbtxt       | 919 ------------------
+ ...tensorflow.experimental.numpy.random.pbtxt |  35 -
+ .../py/non_hermetic/python_configure.bzl      | 315 ------
+ third_party/py/numpy/BUILD                    |   7 +-
+ third_party/py/numpy/LICENSE                  |  60 --
+ .../tensorflow.experimental.numpy.pbtxt       |   2 +-
+ third_party/py/python_configure.bzl           | 252 ++++-
+ 42 files changed, 291 insertions(+), 1936 deletions(-)
+ rename third_party/py/{non_hermetic => }/README (100%)
+ delete mode 100644 third_party/py/non_hermetic/BUILD
+ delete mode 100644 third_party/py/non_hermetic/BUILD.tpl
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/BUILD
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/LICENSE
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/workspace.bzl
+ delete mode 100644 third_party/py/non_hermetic/numpy/BUILD
+ delete mode 100644 third_party/py/non_hermetic/numpy/README.md
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt
+ delete mode 100644 third_party/py/non_hermetic/python_configure.bzl
+ delete mode 100644 third_party/py/numpy/LICENSE
+
+diff --git a/WORKSPACE b/WORKSPACE
+index 644b731b1dc..3626ae4e805 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -11,71 +11,6 @@ http_archive(
+     ],
+ )
+ 
+-# We must initialize hermetic python first.
+-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+-
+-http_archive(
+-    name = "bazel_skylib",
+-    sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
+-    urls = [
+-        "https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+-        "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+-    ],
+-)
+-
+-http_archive(
+-    name = "rules_python",
+-    sha256 = "84aec9e21cc56fbc7f1335035a71c850d1b9b5cc6ff497306f84cced9a769841",
+-    strip_prefix = "rules_python-0.23.1",
+-    url = "https://github.com/bazelbuild/rules_python/releases/download/0.23.1/rules_python-0.23.1.tar.gz",
+-)
+-
+-load("@rules_python//python:repositories.bzl", "python_register_toolchains")
+-load(
+-    "//tensorflow/tools/toolchains/python:python_repo.bzl",
+-    "python_repository",
+-)
+-
+-python_repository(name = "python_version_repo")
+-
+-load("@python_version_repo//:py_version.bzl", "HERMETIC_PYTHON_VERSION")
+-
+-python_register_toolchains(
+-    name = "python",
+-    ignore_root_user_error = True,
+-    python_version = HERMETIC_PYTHON_VERSION,
+-)
+-
+-load("@python//:defs.bzl", "interpreter")
+-load("@rules_python//python:pip.bzl", "package_annotation", "pip_parse")
+-
+-NUMPY_ANNOTATIONS = {
+-    "numpy": package_annotation(
+-        additive_build_content = """\
+-filegroup(
+-    name = "includes",
+-    srcs = glob(["site-packages/numpy/core/include/**/*.h"]),
+-)
+-cc_library(
+-    name = "numpy_headers",
+-    hdrs = [":includes"],
+-    strip_include_prefix="site-packages/numpy/core/include/",
+-)
+-""",
+-    ),
+-}
+-
+-pip_parse(
+-    name = "pypi",
+-    annotations = NUMPY_ANNOTATIONS,
+-    python_interpreter_target = interpreter,
+-    requirements = "//:requirements_lock_" + HERMETIC_PYTHON_VERSION.replace(".", "_") + ".txt",
+-)
+-
+-load("@pypi//:requirements.bzl", "install_deps")
+-
+-install_deps()
+-
+ # Initialize the TensorFlow repository and all dependencies.
+ #
+ # The cascade of load() statements and tf_workspace?() calls works around the
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index 63ce1e7b385..9573a982298 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -1718,8 +1718,6 @@ py_library(
+         "//tensorflow/lite/python:lite",
+         "//tensorflow/lite/python/authoring",
+         "//tensorflow/python:no_contrib",
+-        "@pypi_keras//:pkg",
+-        "@pypi_tensorboard//:pkg",
+     ],
+ )
+ # copybara:comment_end
+diff --git a/tensorflow/compiler/mlir/glob_lit_test.bzl b/tensorflow/compiler/mlir/glob_lit_test.bzl
+index e689b4c0b31..f65c86b727b 100644
+--- a/tensorflow/compiler/mlir/glob_lit_test.bzl
++++ b/tensorflow/compiler/mlir/glob_lit_test.bzl
+@@ -58,7 +58,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+             "@llvm-project//llvm:count",
+             "@llvm-project//llvm:not",
+         ],
+-        deps = ["@pypi_lit//:pkg"],
+         size = size,
+         main = "lit.py",
+         exec_properties = exec_properties,
+diff --git a/tensorflow/compiler/xla/glob_lit_test.bzl b/tensorflow/compiler/xla/glob_lit_test.bzl
+index 44b838ccb0a..86200b24da1 100644
+--- a/tensorflow/compiler/xla/glob_lit_test.bzl
++++ b/tensorflow/compiler/xla/glob_lit_test.bzl
+@@ -52,10 +52,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+     # can remove this logic. This is necessary to have these tests run on builds
+     # using Python 3.11, but also to not include `@pypi_lit` in standalone xla
+     # builds where it won't be found.
+-    deps = []
+-    if xla_root_dir == "tensorflow/compiler/xla/":
+-        deps.append("@pypi_lit//:pkg")
+-
+     native.py_test(
+         name = name,
+         srcs = ["@llvm-project//llvm:lit"],
+@@ -69,7 +65,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+             "@llvm-project//llvm:count",
+             "@llvm-project//llvm:not",
+         ],
+-        deps = deps,
+         size = size,
+         main = "lit.py",
+         exec_properties = exec_properties,
+diff --git a/tensorflow/compiler/xla/mlir_hlo/tests/BUILD b/tensorflow/compiler/xla/mlir_hlo/tests/BUILD
+index 3b67c8fdbec..30a3c562f75 100644
+--- a/tensorflow/compiler/xla/mlir_hlo/tests/BUILD
++++ b/tensorflow/compiler/xla/mlir_hlo/tests/BUILD
+@@ -26,7 +26,6 @@ package(
+         tags = [
+             "nomsan",  # The execution engine doesn't work with msan, see b/248097619.
+         ],
+-        deps = ["@pypi_lit//:pkg"],
+     )
+     for src in glob(["**/*.mlir"])
+ ]
+diff --git a/tensorflow/dtensor/python/tests/BUILD b/tensorflow/dtensor/python/tests/BUILD
+index 615baad3085..9b6c5839b03 100644
+--- a/tensorflow/dtensor/python/tests/BUILD
++++ b/tensorflow/dtensor/python/tests/BUILD
+@@ -303,7 +303,6 @@ pytype_strict_library(
+         ":test_util",
+         "//tensorflow/python/platform:client_testlib",
+         "@absl_py//absl/flags",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/lite/python/BUILD b/tensorflow/lite/python/BUILD
+index cf03dad0ee0..8b771ac20ae 100644
+--- a/tensorflow/lite/python/BUILD
++++ b/tensorflow/lite/python/BUILD
+@@ -266,7 +266,6 @@ py_test(
+         "//tensorflow/python/framework:test_lib",
+         "//tensorflow/python/platform:client_testlib",
+         "//tensorflow/python/platform:resource_loader",
+-        "@pypi_jax//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
+index c1b7eb7c0ea..4093f4c5c09 100644
+--- a/tensorflow/python/BUILD
++++ b/tensorflow/python/BUILD
+@@ -550,7 +550,6 @@ py_strict_library(
+     deps = [
+         ":keras_lib",
+         "//third_party/py/numpy",
+-        "@pypi_scipy//:pkg",
+         "@six_archive//:six",
+     ],
+ )
+diff --git a/tensorflow/python/compiler/tensorrt/BUILD b/tensorflow/python/compiler/tensorrt/BUILD
+index f3fd845ff53..78a45f4ed25 100644
+--- a/tensorflow/python/compiler/tensorrt/BUILD
++++ b/tensorflow/python/compiler/tensorrt/BUILD
+@@ -69,7 +69,6 @@ py_strict_library(
+         "//tensorflow/python/util:nest",
+         "//tensorflow/python/util:tf_export",
+         "//third_party/py/numpy",
+-        "@pypi_packaging//:pkg",
+         "@six_archive//:six",
+     ],
+ )
+diff --git a/tensorflow/python/data/experimental/kernel_tests/service/BUILD b/tensorflow/python/data/experimental/kernel_tests/service/BUILD
+index 8d36d2e3637..2b8a8fd3654 100644
+--- a/tensorflow/python/data/experimental/kernel_tests/service/BUILD
++++ b/tensorflow/python/data/experimental/kernel_tests/service/BUILD
+@@ -143,7 +143,6 @@ tf_py_strict_test(
+         "//tensorflow/python/ops:array_ops",
+         "//tensorflow/python/platform:client_testlib",
+         "@absl_py//absl/testing:parameterized",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/debug/lib/BUILD b/tensorflow/python/debug/lib/BUILD
+index 37c99b30dd2..012e349dffc 100644
+--- a/tensorflow/python/debug/lib/BUILD
++++ b/tensorflow/python/debug/lib/BUILD
+@@ -596,7 +596,6 @@ py_strict_library(
+         "//tensorflow/python/lib/io:lib",
+         "//tensorflow/python/ops:variables",
+         "//tensorflow/python/util:compat",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/distribute/experimental/rpc/BUILD b/tensorflow/python/distribute/experimental/rpc/BUILD
+index 94855205c70..3b3e3f9aee3 100644
+--- a/tensorflow/python/distribute/experimental/rpc/BUILD
++++ b/tensorflow/python/distribute/experimental/rpc/BUILD
+@@ -60,6 +60,5 @@ tf_py_strict_test(
+         "//tensorflow/python/ops:variables",
+         "//tensorflow/python/platform:client_testlib",
+         "//tensorflow/python/util:nest",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+diff --git a/tensorflow/python/distribute/failure_handling/BUILD b/tensorflow/python/distribute/failure_handling/BUILD
+index 77317019fee..df52d80552e 100644
+--- a/tensorflow/python/distribute/failure_handling/BUILD
++++ b/tensorflow/python/distribute/failure_handling/BUILD
+@@ -47,7 +47,6 @@ py_strict_library(
+     deps = [
+         "//tensorflow/python/eager:context",
+         "//tensorflow/python/platform:tf_logging",
+-        "@pypi_requests//:pkg",
+         "@six_archive//:six",
+     ],
+ )
+diff --git a/tensorflow/python/eager/BUILD b/tensorflow/python/eager/BUILD
+index b7bc8350e13..dc5e0ae232f 100644
+--- a/tensorflow/python/eager/BUILD
++++ b/tensorflow/python/eager/BUILD
+@@ -1167,7 +1167,6 @@ cuda_py_strict_test(
+         "//tensorflow/python/training:server_lib",
+         "//tensorflow/python/util:compat",
+         "@absl_py//absl/testing:parameterized",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/estimator/BUILD b/tensorflow/python/estimator/BUILD
+index 73a7c2626bb..0952ccb3154 100644
+--- a/tensorflow/python/estimator/BUILD
++++ b/tensorflow/python/estimator/BUILD
+@@ -380,7 +380,8 @@ py_library(
+     ],
+ )
+ 
+-alias(
++py_library(
+     name = "expect_tensorflow_estimator_installed",
+-    actual = "@pypi_tensorflow_estimator//:pkg",
++    srcs_version = "PY3",
++    visibility = ["//visibility:public"],
+ )
+diff --git a/tensorflow/python/framework/BUILD b/tensorflow/python/framework/BUILD
+index d8ce1f5c0bf..1100c23b562 100644
+--- a/tensorflow/python/framework/BUILD
++++ b/tensorflow/python/framework/BUILD
+@@ -359,7 +359,6 @@ py_strict_library(
+         "//tensorflow/python/util:deprecation",
+         "//tensorflow/python/util:tf_export",
+         "//third_party/py/numpy",
+-        "@pypi_packaging//:pkg",
+     ] + if_xla_available([
+         "//tensorflow/python:_pywrap_tfcompile",
+     ]),
+@@ -2036,7 +2035,6 @@ py_strict_library(
+         "//tensorflow/python/util/protobuf",
+         "//third_party/py/numpy",
+         "@absl_py//absl/testing:parameterized",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
+index c271a5ef77a..d516853a13e 100755
+--- a/tensorflow/python/keras/BUILD
++++ b/tensorflow/python/keras/BUILD
+@@ -42,7 +42,6 @@ py_library(
+         "//tensorflow/python/saved_model",
+         "//tensorflow/python/training",
+         "//tensorflow/python/util:nest",
+-        "@pypi_h5py//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/keras/engine/BUILD b/tensorflow/python/keras/engine/BUILD
+index 2098b1650bc..287b1a4aa91 100644
+--- a/tensorflow/python/keras/engine/BUILD
++++ b/tensorflow/python/keras/engine/BUILD
+@@ -93,7 +93,6 @@ py_library(
+         "//tensorflow/python/util:tf_decorator",
+         "//tensorflow/python/util:tf_export",
+         "//tensorflow/tools/docs:doc_controls",
+-        "@pypi_h5py//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/keras/saving/BUILD b/tensorflow/python/keras/saving/BUILD
+index d7cb2ccb2fc..b40d979c82d 100644
+--- a/tensorflow/python/keras/saving/BUILD
++++ b/tensorflow/python/keras/saving/BUILD
+@@ -53,6 +53,5 @@ py_library(
+         "//tensorflow/python/platform:tf_logging",
+         "//tensorflow/python/saved_model",
+         "//tensorflow/python/training:saver",
+-        "@pypi_h5py//:pkg",
+     ],
+ )
+diff --git a/tensorflow/python/profiler/BUILD b/tensorflow/python/profiler/BUILD
+index b1cfd6ea10c..9413aeeab8b 100644
+--- a/tensorflow/python/profiler/BUILD
++++ b/tensorflow/python/profiler/BUILD
+@@ -43,7 +43,6 @@ cuda_py_strict_test(
+         "//tensorflow/python/eager:test",
+         "//tensorflow/python/framework:errors",
+         "//tensorflow/python/framework:test_lib",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+ 
+diff --git a/tensorflow/python/profiler/integration_test/BUILD b/tensorflow/python/profiler/integration_test/BUILD
+index b20698ea6ea..e7060e5a315 100644
+--- a/tensorflow/python/profiler/integration_test/BUILD
++++ b/tensorflow/python/profiler/integration_test/BUILD
+@@ -35,6 +35,5 @@ cuda_py_strict_test(
+         "//tensorflow/python/platform:tf_logging",
+         "//tensorflow/python/profiler:profiler_client",
+         "//tensorflow/python/profiler:profiler_v2",
+-        "@pypi_portpicker//:pkg",
+     ],
+ )
+diff --git a/tensorflow/python/summary/BUILD b/tensorflow/python/summary/BUILD
+index 126fb6d31f7..b292e39356f 100644
+--- a/tensorflow/python/summary/BUILD
++++ b/tensorflow/python/summary/BUILD
+@@ -121,6 +121,5 @@ tf_py_strict_test(
+         "//tensorflow/python/ops:summary_ops_v2",
+         "//tensorflow/python/platform:client_testlib",
+         "//tensorflow/python/training:training_util",
+-        "@pypi_tensorboard//:pkg",
+     ],
+ )
+diff --git a/third_party/py/BUILD.tpl b/third_party/py/BUILD.tpl
+index 7cc1e085684..45480bd4a31 100644
+--- a/third_party/py/BUILD.tpl
++++ b/third_party/py/BUILD.tpl
+@@ -5,17 +5,16 @@ package(default_visibility = ["//visibility:public"])
+ # Point both runtimes to the same python binary to ensure we always
+ # use the python binary specified by ./configure.py script.
+ load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair")
+-load("@python//:defs.bzl", "interpreter")
+ 
+ py_runtime(
+     name = "py2_runtime",
+-    interpreter_path = interpreter,
++    interpreter_path = "%{PYTHON_BIN_PATH}",
+     python_version = "PY2",
+ )
+ 
+ py_runtime(
+     name = "py3_runtime",
+-    interpreter_path = interpreter,
++    interpreter_path = "%{PYTHON_BIN_PATH}",
+     python_version = "PY3",
+ )
+ 
+@@ -33,8 +32,27 @@ toolchain(
+     exec_compatible_with = [%{PLATFORM_CONSTRAINT}],
+ )
+ 
+-alias(name = "python_headers",
+-      actual = "@python//:python_headers")
++# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
++# See https://docs.python.org/3/extending/windows.html
++cc_import(
++    name = "python_lib",
++    interface_library = select({
++        ":windows": ":python_import_lib",
++        # A placeholder for Unix platforms which makes --no_build happy.
++        "//conditions:default": "not-existing.lib",
++    }),
++    system_provided = 1,
++)
++
++cc_library(
++    name = "python_headers",
++    hdrs = [":python_include"],
++    deps = select({
++        ":windows": [":python_lib"],
++        "//conditions:default": [],
++    }),
++    includes = ["python_include"],
++)
+ 
+ # This alias is exists for the use of targets in the @llvm-project dependency,
+ # which expect a python_headers target called @python_runtime//:headers. We use
+@@ -45,9 +63,18 @@ alias(
+     actual = ":python_headers",
+ )
+ 
++cc_library(
++    name = "numpy_headers",
++    hdrs = [":numpy_include"],
++    includes = ["numpy_include"],
++)
+ 
+ config_setting(
+     name = "windows",
+     values = {"cpu": "x64_windows"},
+     visibility = ["//visibility:public"],
+-)
+\ No newline at end of file
++)
++
++%{PYTHON_INCLUDE_GENRULE}
++%{NUMPY_INCLUDE_GENRULE}
++%{PYTHON_IMPORT_LIB_GENRULE}
+\ No newline at end of file
+diff --git a/third_party/py/non_hermetic/README b/third_party/py/README
+similarity index 100%
+rename from third_party/py/non_hermetic/README
+rename to third_party/py/README
+diff --git a/third_party/py/non_hermetic/BUILD b/third_party/py/non_hermetic/BUILD
+deleted file mode 100644
+index e69de29bb2d..00000000000
+diff --git a/third_party/py/non_hermetic/BUILD.tpl b/third_party/py/non_hermetic/BUILD.tpl
+deleted file mode 100644
+index 45480bd4a31..00000000000
+--- a/third_party/py/non_hermetic/BUILD.tpl
++++ /dev/null
+@@ -1,80 +0,0 @@
+-licenses(["restricted"])
+-
+-package(default_visibility = ["//visibility:public"])
+-
+-# Point both runtimes to the same python binary to ensure we always
+-# use the python binary specified by ./configure.py script.
+-load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair")
+-
+-py_runtime(
+-    name = "py2_runtime",
+-    interpreter_path = "%{PYTHON_BIN_PATH}",
+-    python_version = "PY2",
+-)
+-
+-py_runtime(
+-    name = "py3_runtime",
+-    interpreter_path = "%{PYTHON_BIN_PATH}",
+-    python_version = "PY3",
+-)
+-
+-py_runtime_pair(
+-    name = "py_runtime_pair",
+-    py2_runtime = ":py2_runtime",
+-    py3_runtime = ":py3_runtime",
+-)
+-
+-toolchain(
+-    name = "py_toolchain",
+-    toolchain = ":py_runtime_pair",
+-    toolchain_type = "@bazel_tools//tools/python:toolchain_type",
+-    target_compatible_with = [%{PLATFORM_CONSTRAINT}],
+-    exec_compatible_with = [%{PLATFORM_CONSTRAINT}],
+-)
+-
+-# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
+-# See https://docs.python.org/3/extending/windows.html
+-cc_import(
+-    name = "python_lib",
+-    interface_library = select({
+-        ":windows": ":python_import_lib",
+-        # A placeholder for Unix platforms which makes --no_build happy.
+-        "//conditions:default": "not-existing.lib",
+-    }),
+-    system_provided = 1,
+-)
+-
+-cc_library(
+-    name = "python_headers",
+-    hdrs = [":python_include"],
+-    deps = select({
+-        ":windows": [":python_lib"],
+-        "//conditions:default": [],
+-    }),
+-    includes = ["python_include"],
+-)
+-
+-# This alias is exists for the use of targets in the @llvm-project dependency,
+-# which expect a python_headers target called @python_runtime//:headers. We use
+-# a repo_mapping to alias python_runtime to this package, and an alias to create
+-# the correct target.
+-alias(
+-    name = "headers",
+-    actual = ":python_headers",
+-)
+-
+-cc_library(
+-    name = "numpy_headers",
+-    hdrs = [":numpy_include"],
+-    includes = ["numpy_include"],
+-)
+-
+-config_setting(
+-    name = "windows",
+-    values = {"cpu": "x64_windows"},
+-    visibility = ["//visibility:public"],
+-)
+-
+-%{PYTHON_INCLUDE_GENRULE}
+-%{NUMPY_INCLUDE_GENRULE}
+-%{PYTHON_IMPORT_LIB_GENRULE}
+\ No newline at end of file
+diff --git a/third_party/py/non_hermetic/ml_dtypes/BUILD b/third_party/py/non_hermetic/ml_dtypes/BUILD
+deleted file mode 100644
+index e69de29bb2d..00000000000
+diff --git a/third_party/py/non_hermetic/ml_dtypes/LICENSE b/third_party/py/non_hermetic/ml_dtypes/LICENSE
+deleted file mode 100644
+index d6456956733..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/LICENSE
++++ /dev/null
+@@ -1,202 +0,0 @@
+-
+-                                 Apache License
+-                           Version 2.0, January 2004
+-                        http://www.apache.org/licenses/
+-
+-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+-
+-   1. Definitions.
+-
+-      "License" shall mean the terms and conditions for use, reproduction,
+-      and distribution as defined by Sections 1 through 9 of this document.
+-
+-      "Licensor" shall mean the copyright owner or entity authorized by
+-      the copyright owner that is granting the License.
+-
+-      "Legal Entity" shall mean the union of the acting entity and all
+-      other entities that control, are controlled by, or are under common
+-      control with that entity. For the purposes of this definition,
+-      "control" means (i) the power, direct or indirect, to cause the
+-      direction or management of such entity, whether by contract or
+-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+-      outstanding shares, or (iii) beneficial ownership of such entity.
+-
+-      "You" (or "Your") shall mean an individual or Legal Entity
+-      exercising permissions granted by this License.
+-
+-      "Source" form shall mean the preferred form for making modifications,
+-      including but not limited to software source code, documentation
+-      source, and configuration files.
+-
+-      "Object" form shall mean any form resulting from mechanical
+-      transformation or translation of a Source form, including but
+-      not limited to compiled object code, generated documentation,
+-      and conversions to other media types.
+-
+-      "Work" shall mean the work of authorship, whether in Source or
+-      Object form, made available under the License, as indicated by a
+-      copyright notice that is included in or attached to the work
+-      (an example is provided in the Appendix below).
+-
+-      "Derivative Works" shall mean any work, whether in Source or Object
+-      form, that is based on (or derived from) the Work and for which the
+-      editorial revisions, annotations, elaborations, or other modifications
+-      represent, as a whole, an original work of authorship. For the purposes
+-      of this License, Derivative Works shall not include works that remain
+-      separable from, or merely link (or bind by name) to the interfaces of,
+-      the Work and Derivative Works thereof.
+-
+-      "Contribution" shall mean any work of authorship, including
+-      the original version of the Work and any modifications or additions
+-      to that Work or Derivative Works thereof, that is intentionally
+-      submitted to Licensor for inclusion in the Work by the copyright owner
+-      or by an individual or Legal Entity authorized to submit on behalf of
+-      the copyright owner. For the purposes of this definition, "submitted"
+-      means any form of electronic, verbal, or written communication sent
+-      to the Licensor or its representatives, including but not limited to
+-      communication on electronic mailing lists, source code control systems,
+-      and issue tracking systems that are managed by, or on behalf of, the
+-      Licensor for the purpose of discussing and improving the Work, but
+-      excluding communication that is conspicuously marked or otherwise
+-      designated in writing by the copyright owner as "Not a Contribution."
+-
+-      "Contributor" shall mean Licensor and any individual or Legal Entity
+-      on behalf of whom a Contribution has been received by Licensor and
+-      subsequently incorporated within the Work.
+-
+-   2. Grant of Copyright License. Subject to the terms and conditions of
+-      this License, each Contributor hereby grants to You a perpetual,
+-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+-      copyright license to reproduce, prepare Derivative Works of,
+-      publicly display, publicly perform, sublicense, and distribute the
+-      Work and such Derivative Works in Source or Object form.
+-
+-   3. Grant of Patent License. Subject to the terms and conditions of
+-      this License, each Contributor hereby grants to You a perpetual,
+-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+-      (except as stated in this section) patent license to make, have made,
+-      use, offer to sell, sell, import, and otherwise transfer the Work,
+-      where such license applies only to those patent claims licensable
+-      by such Contributor that are necessarily infringed by their
+-      Contribution(s) alone or by combination of their Contribution(s)
+-      with the Work to which such Contribution(s) was submitted. If You
+-      institute patent litigation against any entity (including a
+-      cross-claim or counterclaim in a lawsuit) alleging that the Work
+-      or a Contribution incorporated within the Work constitutes direct
+-      or contributory patent infringement, then any patent licenses
+-      granted to You under this License for that Work shall terminate
+-      as of the date such litigation is filed.
+-
+-   4. Redistribution. You may reproduce and distribute copies of the
+-      Work or Derivative Works thereof in any medium, with or without
+-      modifications, and in Source or Object form, provided that You
+-      meet the following conditions:
+-
+-      (a) You must give any other recipients of the Work or
+-          Derivative Works a copy of this License; and
+-
+-      (b) You must cause any modified files to carry prominent notices
+-          stating that You changed the files; and
+-
+-      (c) You must retain, in the Source form of any Derivative Works
+-          that You distribute, all copyright, patent, trademark, and
+-          attribution notices from the Source form of the Work,
+-          excluding those notices that do not pertain to any part of
+-          the Derivative Works; and
+-
+-      (d) If the Work includes a "NOTICE" text file as part of its
+-          distribution, then any Derivative Works that You distribute must
+-          include a readable copy of the attribution notices contained
+-          within such NOTICE file, excluding those notices that do not
+-          pertain to any part of the Derivative Works, in at least one
+-          of the following places: within a NOTICE text file distributed
+-          as part of the Derivative Works; within the Source form or
+-          documentation, if provided along with the Derivative Works; or,
+-          within a display generated by the Derivative Works, if and
+-          wherever such third-party notices normally appear. The contents
+-          of the NOTICE file are for informational purposes only and
+-          do not modify the License. You may add Your own attribution
+-          notices within Derivative Works that You distribute, alongside
+-          or as an addendum to the NOTICE text from the Work, provided
+-          that such additional attribution notices cannot be construed
+-          as modifying the License.
+-
+-      You may add Your own copyright statement to Your modifications and
+-      may provide additional or different license terms and conditions
+-      for use, reproduction, or distribution of Your modifications, or
+-      for any such Derivative Works as a whole, provided Your use,
+-      reproduction, and distribution of the Work otherwise complies with
+-      the conditions stated in this License.
+-
+-   5. Submission of Contributions. Unless You explicitly state otherwise,
+-      any Contribution intentionally submitted for inclusion in the Work
+-      by You to the Licensor shall be under the terms and conditions of
+-      this License, without any additional terms or conditions.
+-      Notwithstanding the above, nothing herein shall supersede or modify
+-      the terms of any separate license agreement you may have executed
+-      with Licensor regarding such Contributions.
+-
+-   6. Trademarks. This License does not grant permission to use the trade
+-      names, trademarks, service marks, or product names of the Licensor,
+-      except as required for reasonable and customary use in describing the
+-      origin of the Work and reproducing the content of the NOTICE file.
+-
+-   7. Disclaimer of Warranty. Unless required by applicable law or
+-      agreed to in writing, Licensor provides the Work (and each
+-      Contributor provides its Contributions) on an "AS IS" BASIS,
+-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+-      implied, including, without limitation, any warranties or conditions
+-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+-      PARTICULAR PURPOSE. You are solely responsible for determining the
+-      appropriateness of using or redistributing the Work and assume any
+-      risks associated with Your exercise of permissions under this License.
+-
+-   8. Limitation of Liability. In no event and under no legal theory,
+-      whether in tort (including negligence), contract, or otherwise,
+-      unless required by applicable law (such as deliberate and grossly
+-      negligent acts) or agreed to in writing, shall any Contributor be
+-      liable to You for damages, including any direct, indirect, special,
+-      incidental, or consequential damages of any character arising as a
+-      result of this License or out of the use or inability to use the
+-      Work (including but not limited to damages for loss of goodwill,
+-      work stoppage, computer failure or malfunction, or any and all
+-      other commercial damages or losses), even if such Contributor
+-      has been advised of the possibility of such damages.
+-
+-   9. Accepting Warranty or Additional Liability. While redistributing
+-      the Work or Derivative Works thereof, You may choose to offer,
+-      and charge a fee for, acceptance of support, warranty, indemnity,
+-      or other liability obligations and/or rights consistent with this
+-      License. However, in accepting such obligations, You may act only
+-      on Your own behalf and on Your sole responsibility, not on behalf
+-      of any other Contributor, and only if You agree to indemnify,
+-      defend, and hold each Contributor harmless for any liability
+-      incurred by, or claims asserted against, such Contributor by reason
+-      of your accepting any such warranty or additional liability.
+-
+-   END OF TERMS AND CONDITIONS
+-
+-   APPENDIX: How to apply the Apache License to your work.
+-
+-      To apply the Apache License to your work, attach the following
+-      boilerplate notice, with the fields enclosed by brackets "[]"
+-      replaced with your own identifying information. (Don't include
+-      the brackets!)  The text should be enclosed in the appropriate
+-      comment syntax for the file format. We also recommend that a
+-      file or class name and description of purpose be included on the
+-      same "printed page" as the copyright notice for easier
+-      identification within third-party archives.
+-
+-   Copyright [yyyy] [name of copyright owner]
+-
+-   Licensed under the Apache License, Version 2.0 (the "License");
+-   you may not use this file except in compliance with the License.
+-   You may obtain a copy of the License at
+-
+-       http://www.apache.org/licenses/LICENSE-2.0
+-
+-   Unless required by applicable law or agreed to in writing, software
+-   distributed under the License is distributed on an "AS IS" BASIS,
+-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-   See the License for the specific language governing permissions and
+-   limitations under the License.
+diff --git a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD b/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD
+deleted file mode 100644
+index 95f58d3c476..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD
++++ /dev/null
+@@ -1,50 +0,0 @@
+-load("@pybind11_bazel//:build_defs.bzl", "pybind_extension")
+-
+-package(
+-    default_visibility = ["//visibility:public"],
+-    licenses = ["notice"],
+-)
+-
+-exports_files(["LICENSE"])
+-
+-cc_library(
+-    name = "float8",
+-    hdrs = ["include/float8.h"],
+-    # Internal headers are all relative to , but other packages
+-    # include these headers with the  prefix.
+-    includes = [
+-        ".",
+-        "ml_dtypes",
+-    ],
+-    deps = ["@org_tensorflow//third_party/eigen3"],
+-)
+-
+-pybind_extension(
+-    name = "_custom_floats",
+-    srcs = [
+-        "_src/common.h",
+-        "_src/custom_float.h",
+-        "_src/dtypes.cc",
+-        "_src/int4.h",
+-        "_src/numpy.cc",
+-        "_src/numpy.h",
+-        "_src/ufuncs.h",
+-    ],
+-    includes = ["ml_dtypes"],
+-    visibility = [":__subpackages__"],
+-    deps = [
+-        ":float8",
+-        "@org_tensorflow//third_party/eigen3",
+-        "@org_tensorflow//third_party/py/numpy:headers",
+-    ],
+-)
+-
+-py_library(
+-    name = "ml_dtypes",
+-    srcs = [
+-        "__init__.py",
+-        "_finfo.py",
+-        "_iinfo.py",
+-    ],
+-    deps = [":_custom_floats"],
+-)
+diff --git a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD b/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD
+deleted file mode 100644
+index fde5f2eaccf..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD
++++ /dev/null
+@@ -1,60 +0,0 @@
+-package(
+-    default_visibility = ["//visibility:public"],
+-)
+-
+-py_library(
+-    name = "testing_base",
+-    deps = [
+-        "//:ml_dtypes",
+-        "@absl_py//absl/testing:absltest",
+-        "@absl_py//absl/testing:parameterized",
+-        "@org_tensorflow//third_party/py/numpy",
+-    ],
+-)
+-
+-py_test(
+-    name = "custom_float_test",
+-    srcs = ["custom_float_test.py"],
+-    main = "custom_float_test.py",
+-    deps = [":testing_base"],
+-)
+-
+-py_test(
+-    name = "int4_test",
+-    srcs = ["int4_test.py"],
+-    main = "int4_test.py",
+-    deps = [":testing_base"],
+-)
+-
+-py_test(
+-    name = "iinfo_test",
+-    srcs = ["iinfo_test.py"],
+-    main = "iinfo_test.py",
+-    deps = [":testing_base"],
+-)
+-
+-py_test(
+-    name = "finfo_test",
+-    srcs = ["finfo_test.py"],
+-    main = "finfo_test.py",
+-    deps = [":testing_base"],
+-)
+-
+-py_test(
+-    name = "metadata_test",
+-    srcs = ["metadata_test.py"],
+-    main = "metadata_test.py",
+-    deps = [":testing_base"],
+-)
+-
+-cc_test(
+-    name = "float8_test",
+-    srcs = ["float8_test.cc"],
+-    linkstatic = 1,
+-    deps = [
+-        "//:float8",
+-        "@com_google_absl//absl/strings",
+-        "@com_google_googletest//:gtest_main",
+-        "@org_tensorflow//third_party/eigen3",
+-    ],
+-)
+diff --git a/third_party/py/non_hermetic/ml_dtypes/workspace.bzl b/third_party/py/non_hermetic/ml_dtypes/workspace.bzl
+deleted file mode 100644
+index 2c34f494c34..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/workspace.bzl
++++ /dev/null
+@@ -1,22 +0,0 @@
+-"""Provides the repo macro to import ml_dtypes.
+-
+-ml_dtypes provides machine-learning-specific data-types like bfloat16,
+-float8 varieties, and int4.
+-"""
+-
+-load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
+-
+-def repo():
+-    ML_DTYPES_COMMIT = "5b9fc9ad978757654843f4a8d899715dbea30e88"
+-    ML_DTYPES_SHA256 = "9662811d9ab3823a56f8fa91b5a67fd82062b6dd4f187169b41e82a44e526455"
+-    tf_http_archive(
+-        name = "ml_dtypes",
+-        build_file = "//third_party/py/ml_dtypes:ml_dtypes.BUILD",
+-        link_files = {
+-            "//third_party/py/ml_dtypes:ml_dtypes.tests.BUILD": "tests/BUILD.bazel",
+-            "//third_party/py/ml_dtypes:LICENSE": "LICENSE",
+-        },
+-        sha256 = ML_DTYPES_SHA256,
+-        strip_prefix = "ml_dtypes-{commit}/ml_dtypes".format(commit = ML_DTYPES_COMMIT),
+-        urls = tf_mirror_urls("https://github.com/jax-ml/ml_dtypes/archive/{commit}/ml_dtypes-{commit}.tar.gz".format(commit = ML_DTYPES_COMMIT)),
+-    )
+diff --git a/third_party/py/non_hermetic/numpy/BUILD b/third_party/py/non_hermetic/numpy/BUILD
+deleted file mode 100644
+index c80cc5287bc..00000000000
+--- a/third_party/py/non_hermetic/numpy/BUILD
++++ /dev/null
+@@ -1,21 +0,0 @@
+-licenses(["restricted"])
+-
+-package(default_visibility = ["//visibility:public"])
+-
+-py_library(
+-    name = "numpy",
+-    srcs = ["tf_numpy_dummy.py"],
+-    srcs_version = "PY3",
+-)
+-
+-alias(
+-    name = "headers",
+-    actual = "@local_config_python//:numpy_headers",
+-)
+-
+-genrule(
+-    name = "dummy",
+-    outs = ["tf_numpy_dummy.py"],
+-    cmd = "touch $@",
+-    visibility = ["//visibility:private"],
+-)
+diff --git a/third_party/py/non_hermetic/numpy/README.md b/third_party/py/non_hermetic/numpy/README.md
+deleted file mode 100644
+index 4e58b9df87b..00000000000
+--- a/third_party/py/non_hermetic/numpy/README.md
++++ /dev/null
+@@ -1,4 +0,0 @@
+-# numpy_ops
+-
+-The folder tf_numpy_api/ contains lists of NumPy API symbols that the
+-`numpy_ops` internal module in TensorFlow implements.
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD b/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD
+deleted file mode 100644
+index 070f8ab8a65..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD
++++ /dev/null
+@@ -1,12 +0,0 @@
+-# TensorFlow API backwards compatibility test goldens for tf.experimental.numpy.
+-
+-package(
+-    # copybara:uncomment default_applicable_licenses = ["//tensorflow:license"],
+-    default_visibility = ["//visibility:public"],
+-    licenses = ["notice"],
+-)
+-
+-filegroup(
+-    name = "api_golden",
+-    srcs = glob(["*.pbtxt"]),
+-)
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt
+deleted file mode 100644
+index 9198264c029..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt
++++ /dev/null
+@@ -1,51 +0,0 @@
+-path: "tensorflow.experimental.numpy.ndarray"
+-tf_class {
+-  is_instance: "<class \'tensorflow.python.framework.tensor.Tensor\'>"
+-  is_instance: "<class \'tensorflow.python.types.internal.NativeObject\'>"
+-  is_instance: "<class \'tensorflow.python.types.core.Symbol\'>"
+-  is_instance: "<class \'tensorflow.python.types.core.Tensor\'>"
+-  is_instance: "<type \'object\'>"
+-  member {
+-    name: "OVERLOADABLE_OPERATORS"
+-    mtype: "<type \'set\'>"
+-  }
+-  member {
+-    name: "dtype"
+-    mtype: "<type \'property\'>"
+-  }
+-  member {
+-    name: "name"
+-    mtype: "<type \'property\'>"
+-  }
+-  member {
+-    name: "ndim"
+-    mtype: "<type \'property\'>"
+-  }
+-  member {
+-    name: "shape"
+-    mtype: "<type \'property\'>"
+-  }
+-  member_method {
+-    name: "__init__"
+-  }
+-  member_method {
+-    name: "eval"
+-    argspec: "args=[\'self\', \'feed_dict\', \'session\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "experimental_ref"
+-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "get_shape"
+-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "ref"
+-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "set_shape"
+-    argspec: "args=[\'self\', \'shape\'], varargs=None, keywords=None, defaults=None"
+-  }
+-}
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+deleted file mode 100644
+index 2f5490ad0c9..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
++++ /dev/null
+@@ -1,919 +0,0 @@
+-path: "tensorflow.experimental.numpy"
+-tf_module {
+-  member {
+-    name: "bool_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "complex128"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "complex64"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "complex_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "e"
+-    mtype: "<class \'float\'>"
+-  }
+-  member {
+-    name: "float16"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "float32"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "float64"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "float_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "iinfo"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "inexact"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "inf"
+-    mtype: "<class \'float\'>"
+-  }
+-  member {
+-    name: "int16"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "int32"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "int64"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "int8"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "int_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "ndarray"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "newaxis"
+-    mtype: "<type \'NoneType\'>"
+-  }
+-  member {
+-    name: "object_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "pi"
+-    mtype: "<class \'float\'>"
+-  }
+-  member {
+-    name: "random"
+-    mtype: "<type \'module\'>"
+-  }
+-  member {
+-    name: "string_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "uint16"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "uint32"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "uint64"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "uint8"
+-    mtype: "<type \'type\'>"
+-  }
+-  member {
+-    name: "unicode_"
+-    mtype: "<type \'type\'>"
+-  }
+-  member_method {
+-    name: "abs"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "absolute"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "add"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "all"
+-    argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "allclose"
+-    argspec: "args=[\'a\', \'b\', \'rtol\', \'atol\', \'equal_nan\'], varargs=None, keywords=None, defaults=[\'1e-05\', \'1e-08\', \'False\'], "
+-  }
+-  member_method {
+-    name: "amax"
+-    argspec: "args=[\'a\', \'axis\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "amin"
+-    argspec: "args=[\'a\', \'axis\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "angle"
+-    argspec: "args=[\'z\', \'deg\'], varargs=None, keywords=None, defaults=[\'False\'], "
+-  }
+-  member_method {
+-    name: "any"
+-    argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "append"
+-    argspec: "args=[\'arr\', \'values\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "arange"
+-    argspec: "args=[\'start\', \'stop\', \'step\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'None\'], "
+-  }
+-  member_method {
+-    name: "arccos"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "arccosh"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "arcsin"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "arcsinh"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "arctan"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "arctan2"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "arctanh"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "argmax"
+-    argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "argmin"
+-    argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "argsort"
+-    argspec: "args=[\'a\', \'axis\', \'kind\', \'order\'], varargs=None, keywords=None, defaults=[\'-1\', \'quicksort\', \'None\'], "
+-  }
+-  member_method {
+-    name: "around"
+-    argspec: "args=[\'a\', \'decimals\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "array"
+-    argspec: "args=[\'val\', \'dtype\', \'copy\', \'ndmin\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'0\'], "
+-  }
+-  member_method {
+-    name: "array_equal"
+-    argspec: "args=[\'a1\', \'a2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "asanyarray"
+-    argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "asarray"
+-    argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "ascontiguousarray"
+-    argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "atleast_1d"
+-    argspec: "args=[], varargs=arys, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "atleast_2d"
+-    argspec: "args=[], varargs=arys, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "atleast_3d"
+-    argspec: "args=[], varargs=arys, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "average"
+-    argspec: "args=[\'a\', \'axis\', \'weights\', \'returned\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], "
+-  }
+-  member_method {
+-    name: "bitwise_and"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "bitwise_not"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "bitwise_or"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "bitwise_xor"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "broadcast_arrays"
+-    argspec: "args=[], varargs=args, keywords=kwargs, defaults=None"
+-  }
+-  member_method {
+-    name: "broadcast_to"
+-    argspec: "args=[\'array\', \'shape\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "cbrt"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "ceil"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "clip"
+-    argspec: "args=[\'a\', \'a_min\', \'a_max\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "compress"
+-    argspec: "args=[\'condition\', \'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "concatenate"
+-    argspec: "args=[\'arys\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "conj"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "conjugate"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "copy"
+-    argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "cos"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "cosh"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "count_nonzero"
+-    argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "cross"
+-    argspec: "args=[\'a\', \'b\', \'axisa\', \'axisb\', \'axisc\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\', \'-1\', \'-1\', \'None\'], "
+-  }
+-  member_method {
+-    name: "cumprod"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "cumsum"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "deg2rad"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "diag"
+-    argspec: "args=[\'v\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "diag_indices"
+-    argspec: "args=[\'n\', \'ndim\'], varargs=None, keywords=None, defaults=[\'2\'], "
+-  }
+-  member_method {
+-    name: "diagflat"
+-    argspec: "args=[\'v\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "diagonal"
+-    argspec: "args=[\'a\', \'offset\', \'axis1\', \'axis2\'], varargs=None, keywords=None, defaults=[\'0\', \'0\', \'1\'], "
+-  }
+-  member_method {
+-    name: "diff"
+-    argspec: "args=[\'a\', \'n\', \'axis\'], varargs=None, keywords=None, defaults=[\'1\', \'-1\'], "
+-  }
+-  member_method {
+-    name: "divide"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "divmod"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "dot"
+-    argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "dsplit"
+-    argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "dstack"
+-    argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "einsum"
+-    argspec: "args=[\'subscripts\'], varargs=operands, keywords=kwargs, defaults=None"
+-  }
+-  member_method {
+-    name: "empty"
+-    argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+-  }
+-  member_method {
+-    name: "empty_like"
+-    argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "equal"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "exp"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "exp2"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "expand_dims"
+-    argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "experimental_enable_numpy_behavior"
+-    argspec: "args=[\'prefer_float32\'], varargs=None, keywords=None, defaults=[\'False\'], "
+-  }
+-  member_method {
+-    name: "expm1"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "eye"
+-    argspec: "args=[\'N\', \'M\', \'k\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'0\', \"<class \'float\'>\"], "
+-  }
+-  member_method {
+-    name: "fabs"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "finfo"
+-    argspec: "args=[\'dtype\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "fix"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "flatten"
+-    argspec: "args=[\'a\', \'order\'], varargs=None, keywords=None, defaults=[\'C\'], "
+-  }
+-  member_method {
+-    name: "flip"
+-    argspec: "args=[\'m\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "fliplr"
+-    argspec: "args=[\'m\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "flipud"
+-    argspec: "args=[\'m\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "float_power"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "floor"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "floor_divide"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "full"
+-    argspec: "args=[\'shape\', \'fill_value\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "full_like"
+-    argspec: "args=[\'a\', \'fill_value\', \'dtype\', \'order\', \'subok\', \'shape\'], varargs=None, keywords=None, defaults=[\'None\', \'K\', \'True\', \'None\'], "
+-  }
+-  member_method {
+-    name: "gcd"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "geomspace"
+-    argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'None\', \'0\'], "
+-  }
+-  member_method {
+-    name: "greater"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "greater_equal"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "heaviside"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "hsplit"
+-    argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "hstack"
+-    argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "hypot"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "identity"
+-    argspec: "args=[\'n\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+-  }
+-  member_method {
+-    name: "imag"
+-    argspec: "args=[\'val\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "inner"
+-    argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isclose"
+-    argspec: "args=[\'a\', \'b\', \'rtol\', \'atol\', \'equal_nan\'], varargs=None, keywords=None, defaults=[\'1e-05\', \'1e-08\', \'False\'], "
+-  }
+-  member_method {
+-    name: "iscomplex"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "iscomplexobj"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isfinite"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isinf"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isnan"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isneginf"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isposinf"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isreal"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isrealobj"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "isscalar"
+-    argspec: "args=[\'num\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "issubdtype"
+-    argspec: "args=[\'arg1\', \'arg2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "ix_"
+-    argspec: "args=[], varargs=args, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "kron"
+-    argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "lcm"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "less"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "less_equal"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "linspace"
+-    argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'retstep\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'False\', \"<class \'float\'>\", \'0\'], "
+-  }
+-  member_method {
+-    name: "log"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "log10"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "log1p"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "log2"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logaddexp"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logaddexp2"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logical_and"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logical_not"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logical_or"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logical_xor"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "logspace"
+-    argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'base\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'10.0\', \'None\', \'0\'], "
+-  }
+-  member_method {
+-    name: "matmul"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "max"
+-    argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "maximum"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "mean"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "meshgrid"
+-    argspec: "args=[], varargs=xi, keywords=kwargs, defaults=None"
+-  }
+-  member_method {
+-    name: "min"
+-    argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "minimum"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "mod"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "moveaxis"
+-    argspec: "args=[\'a\', \'source\', \'destination\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "multiply"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "nanmean"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "nanprod"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], "
+-  }
+-  member_method {
+-    name: "nansum"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], "
+-  }
+-  member_method {
+-    name: "ndim"
+-    argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "negative"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "nextafter"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "nonzero"
+-    argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "not_equal"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "ones"
+-    argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+-  }
+-  member_method {
+-    name: "ones_like"
+-    argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "outer"
+-    argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "pad"
+-    argspec: "args=[\'array\', \'pad_width\', \'mode\'], varargs=None, keywords=kwargs, defaults=None"
+-  }
+-  member_method {
+-    name: "polyval"
+-    argspec: "args=[\'p\', \'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "positive"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "power"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "prod"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "promote_types"
+-    argspec: "args=[\'type1\', \'type2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "ptp"
+-    argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "rad2deg"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "ravel"
+-    argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "real"
+-    argspec: "args=[\'val\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "reciprocal"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "remainder"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "repeat"
+-    argspec: "args=[\'a\', \'repeats\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "reshape"
+-    argspec: "args=[\'a\', \'newshape\', \'order\'], varargs=None, keywords=None, defaults=[\'C\'], "
+-  }
+-  member_method {
+-    name: "result_type"
+-    argspec: "args=[], varargs=arrays_and_dtypes, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "roll"
+-    argspec: "args=[\'a\', \'shift\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "rot90"
+-    argspec: "args=[\'m\', \'k\', \'axes\'], varargs=None, keywords=None, defaults=[\'1\', \'(0, 1)\'], "
+-  }
+-  member_method {
+-    name: "round"
+-    argspec: "args=[\'a\', \'decimals\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "select"
+-    argspec: "args=[\'condlist\', \'choicelist\', \'default\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "shape"
+-    argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "sign"
+-    argspec: "args=[\'x\', \'out\', \'where\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "signbit"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "sin"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "sinc"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "sinh"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "size"
+-    argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "sort"
+-    argspec: "args=[\'a\', \'axis\', \'kind\', \'order\'], varargs=None, keywords=None, defaults=[\'-1\', \'quicksort\', \'None\'], "
+-  }
+-  member_method {
+-    name: "split"
+-    argspec: "args=[\'ary\', \'indices_or_sections\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "sqrt"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "square"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "squeeze"
+-    argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "stack"
+-    argspec: "args=[\'arrays\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "std"
+-    argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "subtract"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "sum"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "swapaxes"
+-    argspec: "args=[\'a\', \'axis1\', \'axis2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "take"
+-    argspec: "args=[\'a\', \'indices\', \'axis\', \'out\', \'mode\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'clip\'], "
+-  }
+-  member_method {
+-    name: "take_along_axis"
+-    argspec: "args=[\'arr\', \'indices\', \'axis\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "tan"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "tanh"
+-    argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "tensordot"
+-    argspec: "args=[\'a\', \'b\', \'axes\'], varargs=None, keywords=None, defaults=[\'2\'], "
+-  }
+-  member_method {
+-    name: "tile"
+-    argspec: "args=[\'a\', \'reps\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "trace"
+-    argspec: "args=[\'a\', \'offset\', \'axis1\', \'axis2\', \'dtype\'], varargs=None, keywords=None, defaults=[\'0\', \'0\', \'1\', \'None\'], "
+-  }
+-  member_method {
+-    name: "transpose"
+-    argspec: "args=[\'a\', \'axes\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "tri"
+-    argspec: "args=[\'N\', \'M\', \'k\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'0\', \'None\'], "
+-  }
+-  member_method {
+-    name: "tril"
+-    argspec: "args=[\'m\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "triu"
+-    argspec: "args=[\'m\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+-  }
+-  member_method {
+-    name: "true_divide"
+-    argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "vander"
+-    argspec: "args=[\'x\', \'N\', \'increasing\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
+-  }
+-  member_method {
+-    name: "var"
+-    argspec: "args=[\'a\', \'axis\', \'dtype\', \'out\', \'ddof\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'0\', \'None\'], "
+-  }
+-  member_method {
+-    name: "vdot"
+-    argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "vsplit"
+-    argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "vstack"
+-    argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "where"
+-    argspec: "args=[\'condition\', \'x\', \'y\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+-  }
+-  member_method {
+-    name: "zeros"
+-    argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+-  }
+-  member_method {
+-    name: "zeros_like"
+-    argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-}
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt
+deleted file mode 100644
+index 61a4766f3f8..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt
++++ /dev/null
+@@ -1,35 +0,0 @@
+-path: "tensorflow.experimental.numpy.random"
+-tf_module {
+-  member_method {
+-    name: "poisson"
+-    argspec: "args=[\'lam\', \'size\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\'], "
+-  }
+-  member_method {
+-    name: "rand"
+-    argspec: "args=[], varargs=size, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "randint"
+-    argspec: "args=[\'low\', \'high\', \'size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \"<class \'numpy.int64\'>\"], "
+-  }
+-  member_method {
+-    name: "randn"
+-    argspec: "args=[], varargs=args, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "random"
+-    argspec: "args=[\'size\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "seed"
+-    argspec: "args=[\'s\'], varargs=None, keywords=None, defaults=None"
+-  }
+-  member_method {
+-    name: "standard_normal"
+-    argspec: "args=[\'size\'], varargs=None, keywords=None, defaults=[\'None\'], "
+-  }
+-  member_method {
+-    name: "uniform"
+-    argspec: "args=[\'low\', \'high\', \'size\'], varargs=None, keywords=None, defaults=[\'0.0\', \'1.0\', \'None\'], "
+-  }
+-}
+diff --git a/third_party/py/non_hermetic/python_configure.bzl b/third_party/py/non_hermetic/python_configure.bzl
+deleted file mode 100644
+index 300cbfb6c71..00000000000
+--- a/third_party/py/non_hermetic/python_configure.bzl
++++ /dev/null
+@@ -1,315 +0,0 @@
+-"""Repository rule for Python autoconfiguration.
+-
+-`python_configure` depends on the following environment variables:
+-
+-  * `PYTHON_BIN_PATH`: location of python binary.
+-  * `PYTHON_LIB_PATH`: Location of python libraries.
+-"""
+-
+-load(
+-    "//third_party/remote_config:common.bzl",
+-    "BAZEL_SH",
+-    "PYTHON_BIN_PATH",
+-    "PYTHON_LIB_PATH",
+-    "TF_PYTHON_CONFIG_REPO",
+-    "auto_config_fail",
+-    "config_repo_label",
+-    "execute",
+-    "get_bash_bin",
+-    "get_host_environ",
+-    "get_python_bin",
+-    "is_windows",
+-    "raw_exec",
+-    "read_dir",
+-)
+-
+-def _genrule(src_dir, genrule_name, command, outs):
+-    """Returns a string with a genrule.
+-
+-    Genrule executes the given command and produces the given outputs.
+-    """
+-    return (
+-        "genrule(\n" +
+-        '    name = "' +
+-        genrule_name + '",\n' +
+-        "    outs = [\n" +
+-        outs +
+-        "\n    ],\n" +
+-        '    cmd = """\n' +
+-        command +
+-        '\n   """,\n' +
+-        ")\n"
+-    )
+-
+-def _norm_path(path):
+-    """Returns a path with '/' and remove the trailing slash."""
+-    path = path.replace("\\", "/")
+-    if path[-1] == "/":
+-        path = path[:-1]
+-    return path
+-
+-def _symlink_genrule_for_dir(
+-        repository_ctx,
+-        src_dir,
+-        dest_dir,
+-        genrule_name,
+-        src_files = [],
+-        dest_files = []):
+-    """Returns a genrule to symlink(or copy if on Windows) a set of files.
+-
+-    If src_dir is passed, files will be read from the given directory; otherwise
+-    we assume files are in src_files and dest_files
+-    """
+-    if src_dir != None:
+-        src_dir = _norm_path(src_dir)
+-        dest_dir = _norm_path(dest_dir)
+-        files = "\n".join(read_dir(repository_ctx, src_dir))
+-
+-        # Create a list with the src_dir stripped to use for outputs.
+-        dest_files = files.replace(src_dir, "").splitlines()
+-        src_files = files.splitlines()
+-    command = []
+-    outs = []
+-    for i in range(len(dest_files)):
+-        if dest_files[i] != "":
+-            # If we have only one file to link we do not want to use the dest_dir, as
+-            # $(@D) will include the full path to the file.
+-            dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i]
+-
+-            # Copy the headers to create a sandboxable setup.
+-            cmd = "cp -f"
+-            command.append(cmd + ' "%s" "%s"' % (src_files[i], dest))
+-            outs.append('        "' + dest_dir + dest_files[i] + '",')
+-    genrule = _genrule(
+-        src_dir,
+-        genrule_name,
+-        " && ".join(command),
+-        "\n".join(outs),
+-    )
+-    return genrule
+-
+-def _get_python_lib(repository_ctx, python_bin):
+-    """Gets the python lib path."""
+-    python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH)
+-    if python_lib != None:
+-        return python_lib
+-
+-    # The interesting program to execute.
+-    print_lib = [
+-        "from __future__ import print_function",
+-        "import site",
+-        "import os",
+-        "python_paths = []",
+-        "if os.getenv('PYTHONPATH') is not None:",
+-        "  python_paths = os.getenv('PYTHONPATH').split(':')",
+-        "try:",
+-        "  library_paths = site.getsitepackages()",
+-        "except AttributeError:",
+-        "  from distutils.sysconfig import get_python_lib",
+-        "  library_paths = [get_python_lib()]",
+-        "all_paths = set(python_paths + library_paths)",
+-        "paths = []",
+-        "for path in all_paths:",
+-        "  if os.path.isdir(path):",
+-        "    paths.append(path)",
+-        "if len(paths) >=1:",
+-        "  print(paths[0])",
+-    ]
+-
+-    # The below script writes the above program to a file
+-    # and executes it. This is to work around the limitation
+-    # of not being able to upload files as part of execute.
+-    cmd = "from os import linesep;"
+-    cmd += "f = open('script.py', 'w');"
+-    for line in print_lib:
+-        cmd += "f.write(\"%s\" + linesep);" % line
+-    cmd += "f.close();"
+-    cmd += "from subprocess import call;"
+-    cmd += "call([\"%s\", \"script.py\"]);" % python_bin
+-
+-    result = execute(repository_ctx, [python_bin, "-c", cmd])
+-    return result.stdout.strip()
+-
+-def _check_python_lib(repository_ctx, python_lib):
+-    """Checks the python lib path."""
+-    cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib)
+-    result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
+-    if result.return_code == 1:
+-        auto_config_fail("Invalid python library path: %s" % python_lib)
+-
+-def _check_python_bin(repository_ctx, python_bin):
+-    """Checks the python bin path."""
+-    cmd = '[[ -x "%s" ]] && [[ ! -d "%s" ]]' % (python_bin, python_bin)
+-    result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
+-    if result.return_code == 1:
+-        auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % (
+-            PYTHON_BIN_PATH,
+-            python_bin,
+-        ))
+-
+-def _get_python_include(repository_ctx, python_bin):
+-    """Gets the python include path."""
+-    result = execute(
+-        repository_ctx,
+-        [
+-            python_bin,
+-            "-Wignore",
+-            "-c",
+-            "import importlib; " +
+-            "import importlib.util; " +
+-            "print(importlib.import_module('distutils.sysconfig').get_python_inc() " +
+-            "if importlib.util.find_spec('distutils.sysconfig') " +
+-            "else importlib.import_module('sysconfig').get_path('include'))",
+-        ],
+-        error_msg = "Problem getting python include path.",
+-        error_details = ("Is the Python binary path set up right? " +
+-                         "(See ./configure or " + PYTHON_BIN_PATH + ".) " +
+-                         "Is distutils installed?"),
+-    )
+-    return result.stdout.splitlines()[0]
+-
+-def _get_python_import_lib_name(repository_ctx, python_bin):
+-    """Get Python import library name (pythonXY.lib) on Windows."""
+-    result = execute(
+-        repository_ctx,
+-        [
+-            python_bin,
+-            "-c",
+-            "import sys;" +
+-            'print("python" + str(sys.version_info[0]) + ' +
+-            '      str(sys.version_info[1]) + ".lib")',
+-        ],
+-        error_msg = "Problem getting python import library.",
+-        error_details = ("Is the Python binary path set up right? " +
+-                         "(See ./configure or " + PYTHON_BIN_PATH + ".) "),
+-    )
+-    return result.stdout.splitlines()[0]
+-
+-def _get_numpy_include(repository_ctx, python_bin):
+-    """Gets the numpy include path."""
+-    return execute(
+-        repository_ctx,
+-        [
+-            python_bin,
+-            "-c",
+-            "from __future__ import print_function;" +
+-            "import numpy;" +
+-            " print(numpy.get_include());",
+-        ],
+-        error_msg = "Problem getting numpy include path.",
+-        error_details = "Is numpy installed?",
+-    ).stdout.splitlines()[0]
+-
+-def _create_local_python_repository(repository_ctx):
+-    """Creates the repository containing files set up to build with Python."""
+-
+-    # Resolve all labels before doing any real work. Resolving causes the
+-    # function to be restarted with all previous state being lost. This
+-    # can easily lead to a O(n^2) runtime in the number of labels.
+-    build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl"))
+-
+-    python_bin = get_python_bin(repository_ctx)
+-    _check_python_bin(repository_ctx, python_bin)
+-    python_lib = _get_python_lib(repository_ctx, python_bin)
+-    _check_python_lib(repository_ctx, python_lib)
+-    python_include = _get_python_include(repository_ctx, python_bin)
+-    numpy_include = _get_numpy_include(repository_ctx, python_bin) + "/numpy"
+-    python_include_rule = _symlink_genrule_for_dir(
+-        repository_ctx,
+-        python_include,
+-        "python_include",
+-        "python_include",
+-    )
+-    python_import_lib_genrule = ""
+-
+-    # To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
+-    # See https://docs.python.org/3/extending/windows.html
+-    if is_windows(repository_ctx):
+-        python_bin = python_bin.replace("\\", "/")
+-        python_include = _norm_path(python_include)
+-        python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin)
+-        python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name
+-        python_import_lib_genrule = _symlink_genrule_for_dir(
+-            repository_ctx,
+-            None,
+-            "",
+-            "python_import_lib",
+-            [python_import_lib_src],
+-            [python_import_lib_name],
+-        )
+-    numpy_include_rule = _symlink_genrule_for_dir(
+-        repository_ctx,
+-        numpy_include,
+-        "numpy_include/numpy",
+-        "numpy_include",
+-    )
+-
+-    platform_constraint = ""
+-    if repository_ctx.attr.platform_constraint:
+-        platform_constraint = "\"%s\"" % repository_ctx.attr.platform_constraint
+-    repository_ctx.template("BUILD", build_tpl, {
+-        "%{PYTHON_BIN_PATH}": python_bin,
+-        "%{PYTHON_INCLUDE_GENRULE}": python_include_rule,
+-        "%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule,
+-        "%{NUMPY_INCLUDE_GENRULE}": numpy_include_rule,
+-        "%{PLATFORM_CONSTRAINT}": platform_constraint,
+-    })
+-
+-def _create_remote_python_repository(repository_ctx, remote_config_repo):
+-    """Creates pointers to a remotely configured repo set up to build with Python.
+-    """
+-    repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {})
+-
+-def _python_autoconf_impl(repository_ctx):
+-    """Implementation of the python_autoconf repository rule."""
+-    if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None:
+-        _create_remote_python_repository(
+-            repository_ctx,
+-            get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO),
+-        )
+-    else:
+-        _create_local_python_repository(repository_ctx)
+-
+-_ENVIRONS = [
+-    BAZEL_SH,
+-    PYTHON_BIN_PATH,
+-    PYTHON_LIB_PATH,
+-]
+-
+-local_python_configure = repository_rule(
+-    implementation = _create_local_python_repository,
+-    environ = _ENVIRONS,
+-    attrs = {
+-        "environ": attr.string_dict(),
+-        "platform_constraint": attr.string(),
+-    },
+-)
+-
+-remote_python_configure = repository_rule(
+-    implementation = _create_local_python_repository,
+-    environ = _ENVIRONS,
+-    remotable = True,
+-    attrs = {
+-        "environ": attr.string_dict(),
+-        "platform_constraint": attr.string(),
+-    },
+-)
+-
+-python_configure = repository_rule(
+-    implementation = _python_autoconf_impl,
+-    environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO],
+-    attrs = {
+-        "platform_constraint": attr.string(),
+-    },
+-)
+-"""Detects and configures the local Python.
+-
+-Add the following to your WORKSPACE FILE:
+-
+-```python
+-python_configure(name = "local_config_python")
+-```
+-
+-Args:
+-  name: A unique name for this workspace rule.
+-"""
+diff --git a/third_party/py/numpy/BUILD b/third_party/py/numpy/BUILD
+index 97c7907fc38..c80cc5287bc 100644
+--- a/third_party/py/numpy/BUILD
++++ b/third_party/py/numpy/BUILD
+@@ -2,14 +2,15 @@ licenses(["restricted"])
+ 
+ package(default_visibility = ["//visibility:public"])
+ 
+-alias(
++py_library(
+     name = "numpy",
+-    actual = "@pypi_numpy//:pkg",
++    srcs = ["tf_numpy_dummy.py"],
++    srcs_version = "PY3",
+ )
+ 
+ alias(
+     name = "headers",
+-    actual = "@pypi_numpy//:numpy_headers",
++    actual = "@local_config_python//:numpy_headers",
+ )
+ 
+ genrule(
+diff --git a/third_party/py/numpy/LICENSE b/third_party/py/numpy/LICENSE
+deleted file mode 100644
+index b9731f734f5..00000000000
+--- a/third_party/py/numpy/LICENSE
++++ /dev/null
+@@ -1,60 +0,0 @@
+-Copyright (c) 2005-2019, NumPy Developers.
+-All rights reserved.
+-
+-Redistribution and use in source and binary forms, with or without
+-modification, are permitted provided that the following conditions are
+-met:
+-
+-    * Redistributions of source code must retain the above copyright
+-       notice, this list of conditions and the following disclaimer.
+-
+-    * Redistributions in binary form must reproduce the above
+-       copyright notice, this list of conditions and the following
+-       disclaimer in the documentation and/or other materials provided
+-       with the distribution.
+-
+-    * Neither the name of the NumPy Developers nor the names of any
+-       contributors may be used to endorse or promote products derived
+-       from this software without specific prior written permission.
+-
+-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+-
+-
+-
+-The NumPy repository and source distributions bundle several libraries that are
+-compatibly licensed.  We list these here.
+-
+-Name: Numpydoc
+-Files: doc/sphinxext/numpydoc/*
+-License: 2-clause BSD
+-  For details, see doc/sphinxext/LICENSE.txt
+-
+-Name: scipy-sphinx-theme
+-Files: doc/scipy-sphinx-theme/*
+-License: 3-clause BSD, PSF and Apache 2.0
+-  For details, see doc/scipy-sphinx-theme/LICENSE.txt
+-
+-Name: lapack-lite
+-Files: numpy/linalg/lapack_lite/*
+-License: 3-clause BSD
+-  For details, see numpy/linalg/lapack_lite/LICENSE.txt
+-
+-Name: tempita
+-Files: tools/npy_tempita/*
+-License: BSD derived
+-  For details, see tools/npy_tempita/license.txt
+-
+-Name: dragon4
+-Files: numpy/core/src/multiarray/dragon4.c
+-License: One of a kind
+-  For license text, see numpy/core/src/multiarray/dragon4.c
+diff --git a/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt b/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+index 05939b53b5f..2f5490ad0c9 100644
+--- a/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
++++ b/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+@@ -390,7 +390,7 @@ tf_module {
+   }
+   member_method {
+     name: "experimental_enable_numpy_behavior"
+-    argspec: "args=[\'prefer_float32\', \'dtype_conversion_mode\'], varargs=None, keywords=None, defaults=[\'False\', \'legacy\'], "
++    argspec: "args=[\'prefer_float32\'], varargs=None, keywords=None, defaults=[\'False\'], "
+   }
+   member_method {
+     name: "expm1"
+diff --git a/third_party/py/python_configure.bzl b/third_party/py/python_configure.bzl
+index 3728a91b931..300cbfb6c71 100644
+--- a/third_party/py/python_configure.bzl
++++ b/third_party/py/python_configure.bzl
+@@ -1,4 +1,9 @@
+ """Repository rule for Python autoconfiguration.
++
++`python_configure` depends on the following environment variables:
++
++  * `PYTHON_BIN_PATH`: location of python binary.
++  * `PYTHON_LIB_PATH`: Location of python libraries.
+ """
+ 
+ load(
+@@ -6,8 +11,195 @@ load(
+     "BAZEL_SH",
+     "PYTHON_BIN_PATH",
+     "PYTHON_LIB_PATH",
++    "TF_PYTHON_CONFIG_REPO",
++    "auto_config_fail",
++    "config_repo_label",
++    "execute",
++    "get_bash_bin",
++    "get_host_environ",
++    "get_python_bin",
++    "is_windows",
++    "raw_exec",
++    "read_dir",
+ )
+ 
++def _genrule(src_dir, genrule_name, command, outs):
++    """Returns a string with a genrule.
++
++    Genrule executes the given command and produces the given outputs.
++    """
++    return (
++        "genrule(\n" +
++        '    name = "' +
++        genrule_name + '",\n' +
++        "    outs = [\n" +
++        outs +
++        "\n    ],\n" +
++        '    cmd = """\n' +
++        command +
++        '\n   """,\n' +
++        ")\n"
++    )
++
++def _norm_path(path):
++    """Returns a path with '/' and remove the trailing slash."""
++    path = path.replace("\\", "/")
++    if path[-1] == "/":
++        path = path[:-1]
++    return path
++
++def _symlink_genrule_for_dir(
++        repository_ctx,
++        src_dir,
++        dest_dir,
++        genrule_name,
++        src_files = [],
++        dest_files = []):
++    """Returns a genrule to symlink(or copy if on Windows) a set of files.
++
++    If src_dir is passed, files will be read from the given directory; otherwise
++    we assume files are in src_files and dest_files
++    """
++    if src_dir != None:
++        src_dir = _norm_path(src_dir)
++        dest_dir = _norm_path(dest_dir)
++        files = "\n".join(read_dir(repository_ctx, src_dir))
++
++        # Create a list with the src_dir stripped to use for outputs.
++        dest_files = files.replace(src_dir, "").splitlines()
++        src_files = files.splitlines()
++    command = []
++    outs = []
++    for i in range(len(dest_files)):
++        if dest_files[i] != "":
++            # If we have only one file to link we do not want to use the dest_dir, as
++            # $(@D) will include the full path to the file.
++            dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i]
++
++            # Copy the headers to create a sandboxable setup.
++            cmd = "cp -f"
++            command.append(cmd + ' "%s" "%s"' % (src_files[i], dest))
++            outs.append('        "' + dest_dir + dest_files[i] + '",')
++    genrule = _genrule(
++        src_dir,
++        genrule_name,
++        " && ".join(command),
++        "\n".join(outs),
++    )
++    return genrule
++
++def _get_python_lib(repository_ctx, python_bin):
++    """Gets the python lib path."""
++    python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH)
++    if python_lib != None:
++        return python_lib
++
++    # The interesting program to execute.
++    print_lib = [
++        "from __future__ import print_function",
++        "import site",
++        "import os",
++        "python_paths = []",
++        "if os.getenv('PYTHONPATH') is not None:",
++        "  python_paths = os.getenv('PYTHONPATH').split(':')",
++        "try:",
++        "  library_paths = site.getsitepackages()",
++        "except AttributeError:",
++        "  from distutils.sysconfig import get_python_lib",
++        "  library_paths = [get_python_lib()]",
++        "all_paths = set(python_paths + library_paths)",
++        "paths = []",
++        "for path in all_paths:",
++        "  if os.path.isdir(path):",
++        "    paths.append(path)",
++        "if len(paths) >=1:",
++        "  print(paths[0])",
++    ]
++
++    # The below script writes the above program to a file
++    # and executes it. This is to work around the limitation
++    # of not being able to upload files as part of execute.
++    cmd = "from os import linesep;"
++    cmd += "f = open('script.py', 'w');"
++    for line in print_lib:
++        cmd += "f.write(\"%s\" + linesep);" % line
++    cmd += "f.close();"
++    cmd += "from subprocess import call;"
++    cmd += "call([\"%s\", \"script.py\"]);" % python_bin
++
++    result = execute(repository_ctx, [python_bin, "-c", cmd])
++    return result.stdout.strip()
++
++def _check_python_lib(repository_ctx, python_lib):
++    """Checks the python lib path."""
++    cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib)
++    result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
++    if result.return_code == 1:
++        auto_config_fail("Invalid python library path: %s" % python_lib)
++
++def _check_python_bin(repository_ctx, python_bin):
++    """Checks the python bin path."""
++    cmd = '[[ -x "%s" ]] && [[ ! -d "%s" ]]' % (python_bin, python_bin)
++    result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
++    if result.return_code == 1:
++        auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % (
++            PYTHON_BIN_PATH,
++            python_bin,
++        ))
++
++def _get_python_include(repository_ctx, python_bin):
++    """Gets the python include path."""
++    result = execute(
++        repository_ctx,
++        [
++            python_bin,
++            "-Wignore",
++            "-c",
++            "import importlib; " +
++            "import importlib.util; " +
++            "print(importlib.import_module('distutils.sysconfig').get_python_inc() " +
++            "if importlib.util.find_spec('distutils.sysconfig') " +
++            "else importlib.import_module('sysconfig').get_path('include'))",
++        ],
++        error_msg = "Problem getting python include path.",
++        error_details = ("Is the Python binary path set up right? " +
++                         "(See ./configure or " + PYTHON_BIN_PATH + ".) " +
++                         "Is distutils installed?"),
++    )
++    return result.stdout.splitlines()[0]
++
++def _get_python_import_lib_name(repository_ctx, python_bin):
++    """Get Python import library name (pythonXY.lib) on Windows."""
++    result = execute(
++        repository_ctx,
++        [
++            python_bin,
++            "-c",
++            "import sys;" +
++            'print("python" + str(sys.version_info[0]) + ' +
++            '      str(sys.version_info[1]) + ".lib")',
++        ],
++        error_msg = "Problem getting python import library.",
++        error_details = ("Is the Python binary path set up right? " +
++                         "(See ./configure or " + PYTHON_BIN_PATH + ".) "),
++    )
++    return result.stdout.splitlines()[0]
++
++def _get_numpy_include(repository_ctx, python_bin):
++    """Gets the numpy include path."""
++    return execute(
++        repository_ctx,
++        [
++            python_bin,
++            "-c",
++            "from __future__ import print_function;" +
++            "import numpy;" +
++            " print(numpy.get_include());",
++        ],
++        error_msg = "Problem getting numpy include path.",
++        error_details = "Is numpy installed?",
++    ).stdout.splitlines()[0]
++
+ def _create_local_python_repository(repository_ctx):
+     """Creates the repository containing files set up to build with Python."""
+ 
+@@ -15,14 +207,68 @@ def _create_local_python_repository(repository_ctx):
+     # function to be restarted with all previous state being lost. This
+     # can easily lead to a O(n^2) runtime in the number of labels.
+     build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl"))
++
++    python_bin = get_python_bin(repository_ctx)
++    _check_python_bin(repository_ctx, python_bin)
++    python_lib = _get_python_lib(repository_ctx, python_bin)
++    _check_python_lib(repository_ctx, python_lib)
++    python_include = _get_python_include(repository_ctx, python_bin)
++    numpy_include = _get_numpy_include(repository_ctx, python_bin) + "/numpy"
++    python_include_rule = _symlink_genrule_for_dir(
++        repository_ctx,
++        python_include,
++        "python_include",
++        "python_include",
++    )
++    python_import_lib_genrule = ""
++
++    # To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
++    # See https://docs.python.org/3/extending/windows.html
++    if is_windows(repository_ctx):
++        python_bin = python_bin.replace("\\", "/")
++        python_include = _norm_path(python_include)
++        python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin)
++        python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name
++        python_import_lib_genrule = _symlink_genrule_for_dir(
++            repository_ctx,
++            None,
++            "",
++            "python_import_lib",
++            [python_import_lib_src],
++            [python_import_lib_name],
++        )
++    numpy_include_rule = _symlink_genrule_for_dir(
++        repository_ctx,
++        numpy_include,
++        "numpy_include/numpy",
++        "numpy_include",
++    )
++
+     platform_constraint = ""
+     if repository_ctx.attr.platform_constraint:
+         platform_constraint = "\"%s\"" % repository_ctx.attr.platform_constraint
+-    repository_ctx.template("BUILD", build_tpl, {"%{PLATFORM_CONSTRAINT}": platform_constraint})
++    repository_ctx.template("BUILD", build_tpl, {
++        "%{PYTHON_BIN_PATH}": python_bin,
++        "%{PYTHON_INCLUDE_GENRULE}": python_include_rule,
++        "%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule,
++        "%{NUMPY_INCLUDE_GENRULE}": numpy_include_rule,
++        "%{PLATFORM_CONSTRAINT}": platform_constraint,
++    })
++
++def _create_remote_python_repository(repository_ctx, remote_config_repo):
++    """Creates pointers to a remotely configured repo set up to build with Python.
++    """
++    repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {})
+ 
+ def _python_autoconf_impl(repository_ctx):
+     """Implementation of the python_autoconf repository rule."""
+-    _create_local_python_repository(repository_ctx)
++    if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None:
++        _create_remote_python_repository(
++            repository_ctx,
++            get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO),
++        )
++    else:
++        _create_local_python_repository(repository_ctx)
+ 
+ _ENVIRONS = [
+     BAZEL_SH,
+@@ -32,6 +278,7 @@ _ENVIRONS = [
+ 
+ local_python_configure = repository_rule(
+     implementation = _create_local_python_repository,
++    environ = _ENVIRONS,
+     attrs = {
+         "environ": attr.string_dict(),
+         "platform_constraint": attr.string(),
+@@ -50,6 +297,7 @@ remote_python_configure = repository_rule(
+ 
+ python_configure = repository_rule(
+     implementation = _python_autoconf_impl,
++    environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO],
+     attrs = {
+         "platform_constraint": attr.string(),
+     },
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch
new file mode 100644
index 000000000000..d6c502878849
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch
@@ -0,0 +1,68 @@
+From 9a0eb9b34277229370d8df8407e4b99c13a6da0f Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 22 Dec 2023 20:25:52 +0800
+Subject: [PATCH 13/13] installation: remove `cp_local_config_python`
+
+Revert https://github.com/tensorflow/tensorflow/commit/a034b3d48a9d3dbccff22800ab4b435a89f45103
+---
+ .../tools/pip_package/build_pip_package.sh    | 25 -------------------
+ 1 file changed, 25 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
+index 4a2d42bba58..af76ca4d978 100755
+--- a/tensorflow/tools/pip_package/build_pip_package.sh
++++ b/tensorflow/tools/pip_package/build_pip_package.sh
+@@ -47,22 +47,6 @@ function cp_external() {
+   cp "${src_dir}/local_config_cuda/cuda/cuda/cuda_config.h" "${dest_dir}/local_config_cuda/cuda/cuda/"
+ }
+ 
+-function cp_local_config_python() {
+-  local src_dir=$1
+-  local dest_dir=$2
+-  pushd .
+-  cd "$src_dir"
+-  mkdir -p "${dest_dir}/local_config_python/numpy_include/"
+-  cp -r "pypi_numpy/site-packages/numpy/core/include/numpy" "${dest_dir}/local_config_python/numpy_include/"
+-  mkdir -p "${dest_dir}/local_config_python/python_include/"
+-  if is_windows; then
+-    cp -r python_*/include/* "${dest_dir}/local_config_python/python_include/"
+-  else
+-    cp -r python_*/include/python*/* "${dest_dir}/local_config_python/python_include/"
+-  fi
+-  popd
+-}
+-
+ function copy_xla_aot_runtime_sources() {
+   local src_dir=$1
+   local dst_dir=$2
+@@ -174,9 +158,6 @@ function prepare_src() {
+     cp_external \
+       bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \
+       "${EXTERNAL_INCLUDES}/"
+-    cp_local_config_python \
+-      bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \
+-      "${EXTERNAL_INCLUDES}/"
+     copy_xla_aot_runtime_sources \
+       bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles/org_tensorflow \
+       "${XLA_AOT_RUNTIME_SOURCES}/"
+@@ -220,17 +201,11 @@ function prepare_src() {
+       cp_external \
+         bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \
+         "${EXTERNAL_INCLUDES}"
+-      cp_local_config_python \
+-        bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \
+-        "${EXTERNAL_INCLUDES}"
+     else
+       # New-style runfiles structure (--nolegacy_external_runfiles).
+       cp_external \
+         bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \
+         "${EXTERNAL_INCLUDES}"
+-      cp_local_config_python \
+-        bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \
+-        "${EXTERNAL_INCLUDES}"
+     fi
+     copy_xla_aot_runtime_sources \
+       bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow \
+-- 
+2.41.0
+

diff --git a/sci-libs/tensorflow/tensorflow-2.14.1.ebuild b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
new file mode 100644
index 000000000000..5e4117a37653
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
@@ -0,0 +1,446 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_OPTIONAL=1
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+DEP_VER="$(ver_cut 1-2)"
+
+inherit bazel check-reqs cuda distutils-r1 flag-o-matic prefix toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+RESTRICT="test" # Tests need GPU access
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda mpi +python xla"
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+	IUSE+=" cpu_flags_x86_${i}"
+done
+
+# distfiles that bazel uses for the workspace will be copied to bazel-distdir
+# pkgcheck complains, but do NOT change the .zip to .tar.gz: bazel requires the exact tarball (basename and sha256).
+# The build will fail if different archives are used.
+bazel_external_uris="
+	https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip
+	https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip
+	https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip
+	https://github.com/bazelbuild/apple_support/releases/download/1.6.0/apple_support.1.6.0.tar.gz
+	https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz
+	https://github.com/bazelbuild/bazel-toolchains/archive/8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz -> bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz
+	https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz -> bazelbuild-platforms-0.0.6.tar.gz
+	https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip
+	https://github.com/bazelbuild/rules_apple/releases/download/2.3.0/rules_apple.2.3.0.tar.gz
+	https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz
+	https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
+	https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
+	https://github.com/bazelbuild/rules_foreign_cc/archive/0.7.1.tar.gz -> bazelbuild-rules_foreign_cc-0.7.1.tar.gz
+	https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip
+	https://github.com/bazelbuild/rules_java/releases/download/5.5.1/rules_java-5.5.1.tar.gz -> bazelbuild-rules_java-5.5.1.tar.gz
+	https://github.com/bazelbuild/rules_jvm_external/archive/4.3.zip -> bazelbuild-rules_jvm_external-4.3.zip
+	https://github.com/bazelbuild/rules_pkg/releases/download/0.7.1/rules_pkg-0.7.1.tar.gz -> bazelbuild-rules_pkg-0.7.1.tar.gz
+	https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz
+	https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz -> bazelbuild-rules_python-0.1.0.tar.gz
+	https://github.com/bazelbuild/rules_swift/releases/download/1.0.0/rules_swift.1.0.0.tar.gz -> bazelbuild-rules_swift.1.0.0.tar.gz
+	https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz
+	https://github.com/google/XNNPACK/archive/b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip -> XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip
+	https://github.com/google/benchmark/archive/f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz -> benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz
+	https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz
+	https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip
+	https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz
+	https://github.com/google/re2/archive/03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz -> re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz
+	https://github.com/google/ruy/archive/3286a34cc8de6149ac6844107dfdffac91531e72.zip -> ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip
+	https://github.com/googleapis/googleapis/archive/6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz -> googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz
+	https://github.com/jax-ml/ml_dtypes/archive/5b9fc9ad978757654843f4a8d899715dbea30e88/ml_dtypes-5b9fc9ad978757654843f4a8d899715dbea30e88.tar.gz
+	https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz
+	https://github.com/llvm/llvm-project/archive/668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz -> llvm-project-668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz
+	https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz
+	https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz
+	https://github.com/oneapi-src/oneDNN/archive/refs/tags/v3.2.1.tar.gz -> oneDNN-v3.2.1.tar.gz
+	https://github.com/openxla/stablehlo/archive/9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip -> openxla-stablehlo-9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip
+	https://github.com/openxla/triton/archive/cl546794996.tar.gz -> openxla-triton-cl546794996.tar.gz
+	https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz
+	https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip -> protobuf-3.21.9.zip
+	https://github.com/pybind/pybind11_abseil/archive/2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz -> pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz
+	https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz -> pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz
+	https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip -> pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip
+	https://github.com/pytorch/cpuinfo/archive/87d8234510367db49a65535021af5e1838a65ac2.tar.gz -> pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.tar.gz
+	https://github.com/pytorch/cpuinfo/archive/87d8234510367db49a65535021af5e1838a65ac2.zip -> pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.zip
+	https://github.com/tensorflow/runtime/archive/769f5cc9b8732933140b09e8808d13614182b496.tar.gz -> tensorflow-runtime-769f5cc9b8732933140b09e8808d13614182b496.tar.gz
+	https://gitlab.com/libeigen/eigen/-/archive/0b51f763cbbd0ed08168f88972724329f0375498/eigen-0b51f763cbbd0ed08168f88972724329f0375498.tar.gz
+	cuda? (
+		https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip -> cudnn-frontend-v0.9.zip
+		https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip
+		https://github.com/nvidia/nccl/archive/v2.16.5-1.tar.gz -> nvidia-nccl-v2.16.5-1.tar.gz
+	)
+	python? (
+		https://github.com/intel/ARM_NEON_2_x86_SSE/archive/a15b489e1222b2087007546b4912e21293ea86ff.tar.gz -> ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz
+		https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
+	)"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+		${bazel_external_uris}"
+
+# abseil-cpp-20211102.0-r0 does not work with NVCC
+# check flatbuffers version in tensorflow/lite/schema/schema_generated.h
+RDEPEND="
+	app-arch/snappy
+	=dev-cpp/abseil-cpp-20230125.2*:=
+	dev-db/sqlite
+	dev-libs/double-conversion
+	dev-libs/icu:=
+	>=dev-libs/jsoncpp-1.9.2:=
+	>=dev-libs/nsync-1.25.0
+	dev-libs/openssl:0=
+	>=dev-libs/protobuf-3.13.0:=
+	>=dev-libs/re2-0.2019.06.01:=
+	media-libs/giflib
+	media-libs/libjpeg-turbo
+	media-libs/libpng:0
+	>=net-libs/grpc-1.28:=
+	net-misc/curl
+	sys-libs/zlib
+	>=sys-apps/hwloc-2:=
+	cuda? (
+		dev-util/nvidia-cuda-toolkit:=[profiler]
+		=dev-libs/cudnn-8*
+	)
+	mpi? ( virtual/mpi )
+	python? (
+		${PYTHON_DEPS}
+		~dev-libs/flatbuffers-23.5.26:=
+		dev-python/absl-py[${PYTHON_USEDEP}]
+		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
+		dev-python/astunparse[${PYTHON_USEDEP}]
+		dev-python/clang-python[${PYTHON_USEDEP}]
+		dev-python/dill[${PYTHON_USEDEP}]
+		~dev-python/flatbuffers-23.5.26[${PYTHON_USEDEP}]
+		>=dev-python/gast-0.3.3[${PYTHON_USEDEP}]
+		dev-python/h5py[${PYTHON_USEDEP}]
+		<dev-python/ml_dtypes-0.3.0[${PYTHON_USEDEP}]
+		>=dev-python/numpy-1.19[${PYTHON_USEDEP}]
+		>=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}]
+		>=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}]
+		>=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+		dev-python/pybind11[${PYTHON_USEDEP}]
+		dev-python/six[${PYTHON_USEDEP}]
+		dev-python/tblib[${PYTHON_USEDEP}]
+		dev-python/termcolor[${PYTHON_USEDEP}]
+		dev-python/typing-extensions[${PYTHON_USEDEP}]
+		>=dev-python/grpcio-1.28[${PYTHON_USEDEP}]
+		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
+		>=net-libs/google-cloud-cpp-0.10.0
+		=sci-visualization/tensorboard-${DEP_VER}*[${PYTHON_USEDEP}]
+	)"
+DEPEND="${RDEPEND}
+	python? (
+		dev-python/mock
+		dev-python/setuptools
+	)"
+PDEPEND="python? (
+		=sci-libs/keras-${DEP_VER}*[${PYTHON_USEDEP}]
+		=sci-libs/tensorflow-estimator-${DEP_VER}*[${PYTHON_USEDEP}]
+	)"
+#	>=dev-libs/protobuf-3.8.0
+BDEPEND="
+	app-arch/unzip
+	=dev-build/bazel-6*
+	<dev-build/bazel-6.3
+	dev-java/java-config
+	cuda? (
+		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+	)
+	!python? ( dev-lang/python )
+	python? (
+		dev-python/cython
+		dev-python/mock
+		>=dev-python/grpcio-tools-1.28
+	)
+	dev-util/patchelf"
+REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+CHECKREQS_MEMORY="5G"
+CHECKREQS_DISK_BUILD="10G"
+
+PATCHES=(
+	"${FILESDIR}/${P}-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
+	"${FILESDIR}/${P}-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch"
+	"${FILESDIR}/${P}-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch"
+	"${FILESDIR}/${P}-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch"
+	"${FILESDIR}/${P}-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch"
+	"${FILESDIR}/${P}-0006-systemlib-Update-targets-for-absl_py.patch"
+	"${FILESDIR}/${P}-0007-systemlib-Add-well_known_types_py_pb2-target.patch"
+	"${FILESDIR}/${P}-0008-Relax-setup.py-version-requirements.patch"
+	"${FILESDIR}/${P}-0009-systemlib-update-targets-for-absl.patch"
+	"${FILESDIR}/${P}-0010-systemlib-fix-missing-osx-in-pybind11.patch"
+	"${FILESDIR}/${P}-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch"
+	"${FILESDIR}/${P}-0012-build-use-non-hermetic-python.patch"
+	"${FILESDIR}/${P}-0013-installation-remove-cp_local_config_python.patch"
+)
+
+get-cpu-flags() {
+	local i f=()
+	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
+	done
+	use cpu_flags_x86_fma3 && f+=( -mfma )
+	echo "${f[*]}"
+}
+
+pkg_setup() {
+	local num_pythons_enabled
+	num_pythons_enabled=0
+	count_impls() {
+		num_pythons_enabled=$((${num_pythons_enabled} + 1))
+	}
+	use python && python_foreach_impl count_impls
+
+	# 10G to build C/C++ libs, 6G per python impl
+	CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
+	check-reqs_pkg_setup
+}
+
+src_unpack() {
+	# Only unpack the main distfile
+	unpack "${P}.tar.gz"
+	bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export TF_PYTHON_VERSION="${EPYTHON/python/}"
+
+	append-flags $(get-cpu-flags)
+	append-cxxflags -std=c++17
+	export BUILD_CXXFLAGS+=" -std=c++17"
+	filter-flags '-fvtable-verify=@(std|preinit)'
+	bazel_setup_bazelrc
+
+	# Relax version checks in setup.py
+	sed -i "/^    '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
+
+	# Prefixify hard-coded command locations
+	hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl
+
+	default
+	use python && python_copy_sources
+
+	use cuda && cuda_add_sandbox
+}
+
+src_configure() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_configure() {
+		export CC_OPT_FLAGS=" "
+		export TF_ENABLE_XLA=$(usex xla 1 0)
+		export TF_NEED_OPENCL_SYCL=0
+		export TF_NEED_OPENCL=0
+		export TF_NEED_COMPUTECPP=0
+		export TF_NEED_ROCM=0
+		export TF_NEED_MPI=$(usex mpi 1 0)
+		export TF_SET_ANDROID_WORKSPACE=0
+
+		if use python; then
+			export PYTHON_BIN_PATH="${PYTHON}"
+			export PYTHON_LIB_PATH="$(python_get_sitedir)"
+		else
+			export PYTHON_BIN_PATH="$(which python)"
+			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
+		fi
+
+		export TF_NEED_CUDA=$(usex cuda 1 0)
+		export TF_DOWNLOAD_CLANG=0
+		export TF_CUDA_CLANG=0
+		export TF_NEED_TENSORRT=0	# $(usex cuda 1 0)
+		if use cuda; then
+			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
+			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
+			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
+			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
+			einfo "Setting CUDA version: $TF_CUDA_VERSION"
+			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
+
+			if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then
+				ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"
+				ewarn "version is not supported by the currently installed CUDA. TensorFlow will"
+				ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."
+				ewarn "If the build fails with linker errors try rebuilding the relevant"
+				ewarn "dependencies using the same compiler version."
+			fi
+
+			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
+				ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
+				ewarn "These may not be optimal for your GPU."
+				ewarn ""
+				ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"
+				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
+				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
+				ewarn ""
+				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
+				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
+			fi
+		fi
+
+		# com_googlesource_code_re2: weird branch using absl, doesn't work with released re2
+		# com_google_protobuf is disabled due to https://github.com/tensorflow/tensorflow/issues/61593
+		local SYSLIBS=(
+			absl_py
+			astor_archive
+			astunparse_archive
+			boringssl
+			com_github_googlecloudplatform_google_cloud_cpp
+			com_github_grpc_grpc
+			com_google_absl
+			# com_google_protobuf
+			curl
+			cython
+			dill_archive
+			double_conversion
+			flatbuffers
+			functools32_archive
+			gast_archive
+			gif
+			hwloc
+			icu
+			jsoncpp_git
+			libjpeg_turbo
+			nasm
+			nsync
+			opt_einsum_archive
+			org_sqlite
+			pasta
+			png
+			pybind11
+			six_archive
+			snappy
+			tblib_archive
+			termcolor_archive
+			typing_extensions_archive
+			wrapt
+			zlib
+		)
+
+		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
+		export TF_IGNORE_MAX_BAZEL_VERSION=1
+
+		# This is not autoconf
+		./configure || die
+
+		echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die
+		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+		echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+		echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+
+		for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags)
+		do
+			echo "build --copt=\"${cflag}\"" >> .bazelrc || die
+			echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die
+		done
+	}
+	if use python; then
+		python_foreach_impl run_in_build_dir do_configure
+	else
+		do_configure
+	fi
+}
+
+src_compile() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	if use python; then
+		python_setup
+		BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	# fail early if any deps are missing
+	ebazel build -k --nobuild \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so \
+		//tensorflow:libtensorflow_cc.so \
+		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
+
+	ebazel build \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so
+	ebazel build //tensorflow:libtensorflow_cc.so
+	ebazel build //tensorflow:install_headers
+	ebazel shutdown
+
+	do_compile() {
+		ebazel build //tensorflow/tools/pip_package:build_pip_package
+		ebazel shutdown
+	}
+	BUILD_DIR="${S}"
+	cd "${BUILD_DIR}" || die
+	use python && python_foreach_impl run_in_build_dir do_compile
+}
+
+src_install() {
+	local i l
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_install() {
+		einfo "Installing ${EPYTHON} files"
+		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
+		mkdir -p "${srcdir}" || die
+		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+		cd "${srcdir}" || die
+		esetup.py install
+
+		# libtensorflow_framework.so and libtensorflow_cc.so are in /usr/lib already
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_cc.so* || die
+		python_optimize
+	}
+
+	if use python; then
+		python_foreach_impl run_in_build_dir do_install
+
+		# Symlink to python-exec scripts
+		for i in "${ED}"/usr/lib/python-exec/*/*; do
+			n="${i##*/}"
+			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
+		done
+
+		python_setup
+		local BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	einfo "Installing headers"
+	insinto /usr/include/${PN}/
+	doins -r bazel-bin/tensorflow/include/*
+
+	einfo "Installing libs"
+	# Generate pkg-config file
+	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
+	insinto /usr/$(get_libdir)/pkgconfig
+	doins ${PN}.pc ${PN}_cc.pc
+
+	for l in libtensorflow{,_framework,_cc}.so; do
+		patchelf --add-rpath '/opt/cuda/lib64' bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
+	done
+
+	einstalldocs
+
+	# Workaround for https://bugs.gentoo.org/831927
+	export MAKEOPTS="-j1"
+}


