From be3f4b543459a8a11ccab9bf25972a86db83e192 Mon Sep 17 00:00:00 2001 From: vbarda Date: Tue, 28 Jan 2025 14:03:04 -0500 Subject: [PATCH 01/14] docs: revamp streaming how-to guides --- ...0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib | 1 - ...0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib | 1 - ...5-a489-47bf-b482-a744a54e2cc4.msgpack.zlib | 1 - ...0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib | 1 - ...9-8922-46ea-bd5b-18264fcc523a.msgpack.zlib | 1 + docs/docs/how-tos/index.md | 5 +- docs/docs/how-tos/stream-updates.ipynb | 186 ------- docs/docs/how-tos/stream-values.ipynb | 248 --------- docs/docs/how-tos/streaming-content.ipynb | 346 ------------ docs/docs/how-tos/streaming/streaming.ipynb | 507 ++++++++++++++++++ docs/docs/how-tos/streaming/streaming.md | 2 + docs/mkdocs.yml | 8 +- 12 files changed, 518 insertions(+), 789 deletions(-) delete mode 100644 docs/cassettes/stream-multiple_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib delete mode 100644 docs/cassettes/stream-updates_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib delete mode 100644 docs/cassettes/stream-values_c122bf15-a489-47bf-b482-a744a54e2cc4.msgpack.zlib delete mode 100644 docs/cassettes/stream-values_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib create mode 100644 docs/cassettes/streaming_c251f809-8922-46ea-bd5b-18264fcc523a.msgpack.zlib delete mode 100644 docs/docs/how-tos/stream-updates.ipynb delete mode 100644 docs/docs/how-tos/stream-values.ipynb delete mode 100644 docs/docs/how-tos/streaming-content.ipynb create mode 100644 docs/docs/how-tos/streaming/streaming.ipynb create mode 100644 docs/docs/how-tos/streaming/streaming.md diff --git a/docs/cassettes/stream-multiple_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib b/docs/cassettes/stream-multiple_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib deleted file mode 100644 index 686c9421cc..0000000000 --- a/docs/cassettes/stream-multiple_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNrtVwtUFNcZRiRaX+cYj6UGja6r1ajMMrM7+0KpQRCWlwvsIqAiHWbu7g7MzgwzswsrQSMxRWtP7FofxzQ+geURBU1IUFFr9ZiKtiamitEoGo3RaNqqaIz1Qe8si4Fge5LWnpPTZs7ZnXvn/vd////9blm1GwgizbF9ttKsBASClOBEXFlWLYBCFxClJT4nkBwcVZlqtlgrXAJ9erJDkngxMiKC4GkVxwOWoFUk54xwYxGkg5Ai4JhngJ9NZR5Hec70WVSidAJRJOxAVEYq5pYoSQ7KYiU4URbBLRNFheQAiiJAwJegoFmFaFOGK5QCxwCZxiUCQVmaA784OQow8ic7LyE4JxOxcIrBtygJgHDCiY1gRAA/SMDJQ4MklyAzQVWo/I3jmIAOkof3M7e5WL/NMq/H40hFiZIlnH4CO5ByA6rJNBQQSYHmA2TKDBFA5WloAaeAlN2MsHGCk5DJVPI2nhAgP+hh0c+cF6DnBIkGnVOSljz+AWBdsg1zlayHlLdBR8h2dykLjaRZu7K0VPYODBAtAMpP7mfQnZLLywekBClLc0qrHYCgoOQVlQ5OlLz1PQPXQJAkgO4ELMlRkLt3m30BzYcrKGBjCAnUwWCxwO8Wb10BADxCMLQb+Dp3ebcTPM/QpN/QiHyRY7cGgovImvRerpODicBUYCXvrmjRw5JmqEl0QkSqB6YZq8BUuEGFbi9GRImgWQamDcIQUCkf719v7r7AE2QB5IQEUtjr69xc352GE71VKQRptvRgSQikw1tFCE4d/nb374KLlWgn8FbHpPYWF1j8WpxGhWEq444ejGWLvNv8r0j/P8019WACJMGDkBzk5d2M1nc5iwGsXXJ4KzQGTY0ARB4WD3jFB7dJLrGsEgYG/PFwdaCItpiTuiLaFhRaGQuD5N1rdbjCFWqdwgJ4hRpV4wpME6kxRsJBfIp1a0xAjPWJMdlhFQhWtMG4zOzKgWrS4WILAFUX88To75WjD62R1YdVioBinhMBEtDKuzULSe9sH0hC7NudqYZwgp1g6QV+sd5aOaiwXdBsY2AZVoTMEgpHnKK3Qq/D6gMrXf6ug3ahCIYiKLZLLgQS5pisOM8JEiICEjYnyeM9He4kiuUEi9JgWo0ORdGpsBhJxkUBiysvlnNCmeJUBS8AhiOo3cUIbBGAoZ00DIL/P9D4YN5gcDO6szeFxBUAVvTWaNDOZ193EgHIEmQzHjOqNMJnz5OJunipZRqjXr+7J5kIuilUoXOKO3uvB1hsQcWtxV3ECE15T4+Hk9w8TEdhOG606dUaLYnr9BqbDcU0uJqicCOOaRti4pAYgnQAxOLPNm91bPas6JSEmDoL5B3DcQU0WHmmT9/cXNKWm+eMwk35pri4wmQxPxGNp1KLkpOSdW6re47KlY+lR0uFSYVFbFEm5o73wFjp1XqNVm/U4QimQlWYCkPUcXhSIqnDZ7qyioutHqfJJGSrRI3HDTJTku2x5uQkIoPLt+djC2zpeJYUb7LG6RKzhTQcTy9MI2KxBfGZMAPpGaLgzKUTYjFzskFvSYPxhG03KmKqAmYibIpiVKAeEFgPiFwNeCTaVQ1TFZQ/C6JUPRvhVIUJnmxmlvFMhWUE0wnAN2zaFloCUbM4FpxeBX3gctNUVKZHlRprKMKZmOi47ILYNGy2h8pPIeK1CTPU5jzOYHWakrPMVl26ZO/mBLUWQ9CAH3QobvAnz9eq/5tavZuFdC9vxOw/m2AcWU5kaZvNZwECLCFvHclwLgr2dAH4YMzTo7O9jQbSqCEMQKen0DxcbTMgMzPTt3dxe9wMKuUDwX+WL/Z1nkCH+uwbs/xHQf6nL/x1dEjpOSt+iw596Upm1v1Rk2cc2Hyr9Xr5nIxaaWJGRkPtr8nQGyc/qt/7QkLFg7sDTS8vnzM9q/U1u3tam2X/iMPB6n5bPhsf9uYB/d33j5fn7T3/oG1i+fRDH5ffNV+rfXZJecmuxTP/gDbbh0tg7zDfjoEJYVH
7D4yd+IV1Lf1G/crbd87V9Q+zbLrmWF3FOw8YfnmKWLXk6MlXs0ZO+iDmvYUhv8Lc1+4e26K6EfTc8tBTia+0tQ8cOzIC63espj74bMnnywrKDoUMrBjgujKnKfFPQeq4y2VTtNrRrbR7g3Ax8fLGwYtS76w92CR0ZM2942m/eb/52o5Hkdf1pjFOroZ5Z0T7uk/IqGb6vRGOtttnB046P6467NyQ9diF+vyRJ05kl/85dEnlpdC3kK+EQfbiwoejHlx/IK5ZcvCtO/NylLtPvE9+dsTaum7975YNTaytahDLz941r5sYNq/5Tumm0pv0z9fT4ZephZv/Sv1GGvoqXTmB37XvZWrpm7UbVB3xRwaNV91f9Hw8zU9yaQfEVrQ82nZhxYbtLUm1U6TWloWOTY2eVZU3J7edmzkp5MqFCH+Q+gYN3GZtiIIRe5r4LrjqqeC7cEX3jayLYbqREPBkgKchXAqguFySYP4VlKNlXKSUiXITOCKNccwqzrQVpTt4JinFlB1rT3dkfFvERwh2lxNqJUtTlszzY615cDwPorN5ylKlDLF6Kq9MkG0mmCLCIypEF8t6ehstG9HDmNxvo/IPKPg/RsFtQUN/wMHfMxzsI/04w3v61vccZvwXAECvO4BOr/9ud4Af/4/eATAj/n94B9AZnvodwJZHGWyYFjOqcTwPYj01ZcBBnpHQGfQUpbOh//QO8BSwJUlReuK7Ycvib2LL1KPsx+jQPdenhE6bu5i6l3Z/zajX558JcSwPGhK82Jc5jMxf+0l6yhv7O0ab3Ftvf3B0tM3dT9O+JuiF5oyCVN/x441tVWszV5zW/m1+x7SOvfce3b41efqnc+c/bMu5UtSkz/9wSnH70gkLa1SDm9qIxuT4MyEnni059ZIxJ7T9cj42cumapqsLsv9CRl6SXk+x0zpz+8XxzEdhw9/5xf2QoAvOnaWxZy4unxOju/finOHHtXV37g148Rl31JbGpatrJtW1mMJ8S/HKRycV0ypeG9Jve8h07e9PbWrJwgbP3i+NVcxs6v/50i9/stEZPHv91X3PuI6tv3HQuPbSmf5th5dNv7T6p1NSnksdZNkYrSzRbi8PuUBcPf/QknBv/7YV4NPn07mvgLvRcunCuHUb303zVrjH7ft7Q83ZvBOtLdVDTmIbqxbgh4+NLV+T+OWwHciUJG2/MV8Ufnhk7aE9WapbhyZEWAVTW59OVDixrOJnc4ODgv4Bu7XJtg== \ No newline at end of file diff --git a/docs/cassettes/stream-updates_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib b/docs/cassettes/stream-updates_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib deleted file mode 100644 index 354fa0926c..0000000000 --- a/docs/cassettes/stream-updates_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNrtVw1QE1ceD9XxY7SHtn5VPU2jo9Zhw2422SQwnEVAiciHEFABTTe7L8mSzW7cj0AQtCr1i/E01mvPOa1j+BKKqJUqaFFr9aq1c1Ovtgp4eq32avWsttZaHD/ubQgWqnfT3nkznbvuTLL73vu//9fv//+//1ta4wOCyPBcRD3DSUAgKQkOxPVLawSwQAaiVFbtAZKLpysz0rOsFbLAtE52SZJXjImOJr2MlvcCjmS0FO+J9mHRlIuUouG3lwUhNpV2nva3RSxeqPEAUSSdQNTEqPMWaigeyuIkONAUwi0TRbXkAupCQMKXoGY4tejQRKk1As8ChUYWgaApnQdnPDwNWGXK6ZUQPa8QcXCIwbcoCYD0wIGDZEUAJyTg8UKDJFlQmKBaVJnjeTasg+T3hpg7ZC5ks8LrwXeMeqGGIz0hAieQbGHVFBoaiJTAeMNkmmwRQOUZaAGvhpTdjHDwgodUyLTKNi8pQH7Qw2KIuVeAnhMkBnQOKUbyhz4AJys25Gk4P6Vsg45Q7O5SFhrJcE5NaaniHQgQIwA6RB5i0J2StxcASoKUpfNKa1yApKHktZUuXpQCDT2B20FSFIDuBBzF05B7YLuzmPFGqWngYEkJ1EGwOBByS6DODYAXIVnGB6o7dwV2kl4vy1AhQ6MLRJ6rD4OLKJo8vFyngInAUOCkQHO86OeodKhJvCU6ww/DjFNjWr1Ji+4sQkSJZDgWhg3CklCpam9ofX/3BS9JuSEnJBzCgerOzQ3daXgxUJVKUulZPViSAuUKVJGCh9Dv7j4vyJzEeECgJiHjYXHhxe/F4VoM05p39WCsWBTYHnrFhP4Zfm8PJkAS/AjFQ16BrWhDl7NYwDklV6ACN+HbBCB6YfKAZdVwmySLSyshMOD9YzXhJAqmp3Qhek41rDIRghRosbrkKLWOUGcBr1qH6vRqDI/RYzE6o3p6qrU+ISzG+khMdlkFkhMdEJekrhiooVwy5wZ0XcIj0W9R0IfWKOrDLEVAkZcXARLWKlA/B8nsLB+IJXF3Z6ghvOAkOaY4JDZQq4AKywXDNYaXYUYoLKFwxCMGKghC1xBe6fJ3HbQLRTAUQbFmJREoGGOK4l5ekBARULA4Sf5Aa5SHLFICLA7HDDiBomgsTEaKlWmQJdsTeQ+UKcaqvQJgeZLeV4TAEgFYxsNAEEL/4cIH4waDm9Gmhykk3g04MbANRzufA91JBKBIUMx4wKjSDJ+3Hk3UxUun0JiNxn09yUTQTaEKwiM2PbweZhFExfqiLmKEoQOt4+HARqA4qsdQM6kjURLDCZIEJG6kMQzHdDpAGXYkTEMSSMoFkKxQtAVqEuemxadaEuqyIO8EnnczYH1bRC+bjXLY7J44u85kSSGSaOBLNnkNFgJYZKs7e46FNGf70rhsjMhk3LPnxluS0igEM+qMuMGEmowIpkW1mBZDhJlTZ9JTC+OTCzOs1kytEcdRf8IsDz0TeGVzUbxhrm827sglqdl8LptqYnP9WZlTsy2Z/uRsYu60DMouZYo293TAYbifdbKo1hCfm2pzQjxh2Y2LjlXDSIRFUYwL5wMC8wFRskEfg3VlQ6yaDkVBnLZnIYxVJ8OTLZ1j/bEwjWA4AfiGRTuLkUBcGs+B1g3QB7KPoeNcGdqEpDS900dYuDTt9MQUe3zBghmmaXzmLL8hmzTw1lTrTA4neIHv5gRMZ0bQsB8IVG8KBc/3qv+bWu2Zg3RPbyQ9dDZBHDle5BiHozoLCDCFAnUUy8s0rOkCqIaYZ8bPDTSaKDNO2gFBm3SUHjfQSNLszJ1d3B4Ug0rlQAid5UuqO0+goxEtY8v7qUJPL/i7f1/KnOduR4eU3tox6JZ5zL36Zedzc1zPgci802uqRiQd8F08vn4d3RZ18WBk+6qry3Mip56tEL88eTKuqEwV/ezEQ75Tu3IG3L9dvYq7xl/fc+Tz038zlp94d2xh5IY+t0//Ba8vXt5GHDu78eZLtTkDpPzjgx19at+
Zb3g/GJV2+u3bBarc7OAl5k3fouCdSSufmpJ8F/3N4MLVeOO6l6eMebZs9PKWvZNSSpZt6v26WJXkZtasS3+1X0RFoi2iANn72xl9X3+GnhGzocXd2n+XalNm8AXryuavmn5VfmPT00ODZ1Z8sbdu1R7y9l3bt2dKht75/MMYf5Ptm1FVt4pRq7w+tSPqeu1rNv243uM9fbk1DUVJl30u7aUylUE+HZRaZBP74oK9558/EJHn+Wu/aw1jW9b+Kbp8/MCczVWfDjlZu2iTbnjdx9M8V5PKMhYcv9qAJl+9vgEEzzW3f9B3X+GWPm3bcvYfrj04aWGrflQSOab2mYEvP31pQ4W9wfmJszKYOL3j+o4PJ63RNtc7l7ygM7819O/nYtjy1SdGriFKRozcJi1Z8MofT8bMbDgUW3w/QsGol+puO/lFHATscbZ3T1Q9lvYuSt19IyezbDcSEh4M8DCES+EmzkaR7L/q5BilLdIoRDY/Zkh3FOjsmCGr2JhakMgyOdkmfYrlxzZ8pOCUPVArRZpmYX6o1cqH3/mwOcvXlGqUDqun8hqLYjPJFpJ+US3KHOd/2GjFiB7G2H6Myr80wf9xE3xONeiXNvhn1gZXU6E2I9D69c+8y/gvnP8PXQEIo/GnXQGG/o9eAXR68//hFYAwPfYrAGrUEWYKw+0mGHR6uwHoTGYKp0xmHUkTegz80yvAY2gtKcphdvy01lL+YWu5Lj39Ajro6LWOIeO29J+RsD91Yd66QbnM0QhfdL+kBtySv2nXZxtX5mw7X/q7KY3eM+WR1zri7p4rGa96L6dgwHuNuTOH59++4dtyfuy9O5dd9+9d6PikYXJjaxRx9MK1d2yzZiccvvat6yIue0qGj159U986IThsdO7wKyWluye/YY8buXnHB0/umpC87xvmtc9O6POqyv9gPlPsXvfRsPlPqV5c+9WvB9UeXtY21XezVjpWP8rafmOiamjLqCFVG5Hy6v6FxJqJ8SNrt+SVFZ76s713yvXK/l9WTKBai3YOrF472HJ2cNPEmPmLt6+ac6t+xdBTscYrNfirN1JXLnp39+KP3SmvtOuCjvRevcojC6T0/oeuNB28T1heurRfXfWdsXLrzjlp8mXhennz7/mKI+W+4aU3zmfM3eq68t2JJz864qxtHkI7jy2o+nTcAB/w1L49Irg5LXfFG9qvi988kbNx1vHnOvp2toTnn08y5z2hUv0D9IO+dQ== \ No newline at end of file diff --git a/docs/cassettes/stream-values_c122bf15-a489-47bf-b482-a744a54e2cc4.msgpack.zlib b/docs/cassettes/stream-values_c122bf15-a489-47bf-b482-a744a54e2cc4.msgpack.zlib deleted file mode 100644 index 3932052e7c..0000000000 --- a/docs/cassettes/stream-values_c122bf15-a489-47bf-b482-a744a54e2cc4.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNrtVnlUFPcdR63GJg+8a+O5blJFZZadvRfqgbtyCSxyyCGKszO/2R3YOZiZZXc5DILGPDzXJh5NkxhEQB7gASKCmKRJqk+bxkg8yGHtU2uOGkVT4m1/sywGqq8vfc/+0dZ5j535ze/7+3yvz3z5lFbnA16gWGZAHcWIgMdwES6ETaXVPMhzAkFcWUUD0c4SlYmW5JQdTp7qnGEXRU4ICw3FOErBcoDBKAXO0qH5aChux8RQ+Mw5gA+m0soSns8GvFQop4EgYDYgyMNkiwvlOAt9MSJcyF3wyDRBJtqBzAUweONlFCMTSHmITM6zDiDZOAXAy4uXwDc0SwCH9MrGiYiGlYwYuEThXRB5gNFwQWIOAcAXIqA5mJDo5CUQpUIpvWNZhz8G0cP5wEkn48tZwnr4HCYrlDMY7TOwATHbH5pkQwAB5ynObyZPFQAMnoIZsDJo2ScJkuVpTDJTSMc4jId4sMKCD5zjYeV4kQI9S5wSPb4HwDilHBbLGQ8uHYOFkPLuDRYmSTE2eXGxVB3YIIoHhM/cB9DXkrXmAFyElsVLiqvtACOg53MBwyvtrCB6G/q3bjeG4wAWFDA4S0B8b72tgOJCZAQgHZgIamG7GOArjLc2FwAOwRxUPqjqOeXdg3Gcg8J9qYbmCCxT528vIsXy6Hat1E4EkoERvQcjBA+DW2AkETGhiR5INEaGKjQGhXKPGxFEjGIckDiIA4NBVXG+/ba+GxyG50IkxE9ib1XP4Ya+Nqzg3RmP4ZbkfpAYj9u9OzGe1mka+77nnYxI0cBbbUp81J1/80d3agWKKox7+wFLGXnrfbcw3y/FHugHAkTeg+AsxPK+razCWTaXAt7O69nZOJltpWflFESRImo2o3kmY3xaKr3AYcu0KBLjFqaSCsYakZroVhii9NqIGMNCBNWr9GqtAdWqEVShVKAKFElXavQ6nLXFJZvd7pSFeZp5pkQqN7/A6kqwuBx6PpbCWJeiIFbLi04nSeq4NHeCmgdUhDnKlJHNABOKWYEBJXW2eGcBkREJ5jGMzRUug9E58yliFpOftEhPGvhkLjUjzi0wCckuYx7rSTXYEjT2yHQxErXZouKiDersvuEZVQZE6Y9Qp9QYlNLV0EsUB2Bsot27Q21Q1/BA4ODoAGVVsGSiUyithKQEfzxa7R8hFZYFP/J5TKUZEtTbnmJ3hshUOlky4GQqpUojQ9VhGlWYWiuLik+pM/ndpDyWj3tTeIwRSMjJ+b38r8btTiYXELWmxzK/XWI+7KQUPpxRCHBzrAAQf1TeunQkqWd4IjHmxp7PDGF5G8ZQBT633l0SoeGwpJgm/zacBxIkdI7QAiyEBm3w7/RyrRbmpURQJaJED0pjAIfflxQ4x/IiIgAcjmbR4+0MoTG39HHNUsOq62CRw+Eowh1OAiQ7rWaWhj6FcBnHAweLEa1uBA5I4KBoCjbB9+sf+/CbQaUWtTxqIbK5gBG8NWplz3W4rwkPJA9SGg+BKo3wOvR4o14slWRj1Ota+5sJoE9AO3S00PLovh+iQinUuXuNEYrwdr4IF9katUanAoRSpUX1aozQqjGj0mpQW3EdasAwFN1tikRMGG4HSLKPbd5qc0ZCRHyMqTkd6UsbxOKb+HCfYQWGIsmqZMDD1nhrcQfrJOCc5EEVxEqKyPA2GXAj9GWEzNdAt5gRmZ+WtKcX7SHJKqUh6/sPuaKqZ65/OKBt8pqhAb5rEPx78EBMit/QoRx96HJa+tUvtv91QQaPjxg6YGCQ7O2dwfwblz9YcqT7s/XNC089WL+pKabZWrynTv+m50JX+OCVJTXbZe+O+yYjl7j53f2NRbdyJgX9fN3dO6/eu7uk7eNTUXfSU3+9+qN3T9wZpHoJ8rlVO7L9RE7ptvnf5CWuu26w7GsLXhC1/dSYYysWjZ9cFbb2QN44plEZq/gg8rmXg569Pk22cmNHy7BfpK6KCj6aL9Q3fTWhfkT5rZFTlu2fMGPuG6P+lHwJayy
Jm7khh6sPm55WciZz89rYg5o3T1POpXxF0uY5ty+HX/lb19fh3e+cm63tmsZMml0U3nA7/mT784FdCXuNb42ZfWbN6+TKkoorR+s/2fJxdHFZ98sJQxMXu5AZc9JXdwRtvXYvqHyi4XCZ+lDL8qD7rvttsb+XX53w2pZXFH+/8buDn+7fW3CVkf9M8SoXT3lbu6Z/ay7LPbB3mBg6O3DtqfqunWc21AzpDHnLjD2/a/Dak899tb+a33X5k9zKffMsH97cfSM4XrGqduyKKw6+aGbLOY9jgSJl3el0NCiQSz659QY9sqt4yHvGm89IHRoUUL0M/GEWbNeTlEwDdz4RyRQi63uQcTocfUwwOG7giIVbfmGUjWOOf6WOKElqyCWj7ESPy8AKvDvPAjypxjy7KprkMucnuH+qiMJ4m5OGUUne5IVZPvmSBZ+zoODJkhfLJdXSP3h5jJQz5nBhHkEmOBnG82jSUhL9ksn+KSE/FZZPheVTYfm/LSx1ev2TFZa6/1JhqdIq/w+Fpc7wxIWlWkMqgR4Qep1BaVRZSS2JEyQJgIrACUKj1/4nhaVebdVp/j1h6fxnYZkYkXBh7vBDd8bUG48Er6dH8mWvDX/mxbnHywfNO62kPv/NRMuwDhoZcOnqzPAp5LDCr02pabtqZ48KAPqcwGNNmXFjswo8+z9id08ur3Yd+/x21+r7Z79rdul/+MuVLSbt+NHft85/IbtWbVrCBW++u0I3Yv/5879Fxh4/lAWAceRE+kTgvqnRmYmtW0PLxaVVGctHNxOB+zLfFwYGXDOem1Lx5/Ntme8Xda8Sj9aNT/m2Y1RJ+cUhZoo8XtFe02ked710edy9s/K7EQgSuWfS8pxhZxYW4p3D04d0HHll6RdBLT8sd3V/OdX6ZeOkzJvnDr5weM4l/tOV0buLLlrcdPul5vKabXc3rlhVOq6zeOA1VffVB7sq51Uqon/1/fh7y1pmBOXnjCuSN17QxUY3vX5tzYPy2sDy5vXv0DVBt+rYqWW3pl88m9S1fnBbbfPh935ZMSsuc/WzN8C20EUZeQ2biq+P7ZGEE3nFqsUDAwL+AV1TiAE= \ No newline at end of file diff --git a/docs/cassettes/stream-values_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib b/docs/cassettes/stream-values_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib deleted file mode 100644 index 75cdbb5360..0000000000 --- a/docs/cassettes/stream-values_e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNrtVwtUFNcZxmjVtj6wBKu06maT+iqzO8Mu+4CsigsIKo9lQUDw0NmZu7sDszPrPNhdlOSI2rQSEsdHmkRjE11AgeDboAnEV3w0QY+B0KKN8ViN71glicZH7J1lUTiYNmntOelp5pzduXfuf//X9////W9ZdTHgeIpl+tRRjAA4nBDghF9WVs2BuSLghUVVLiA4WdKfnmbNXCdyVPtEpyC4+Ri1GndTKtYNGJxSEaxLXYypCScuqOHYTYMAG7+NJX3H+zw7T+kCPI87AK+MUeTNUxIslMUIcKL0wC3jeIXgBAoPwOGLU1CMgrcrIxVKjqWBTCPygFOWzoFfXCwJaPmTwy0gWlYmYuAUg29e4ADughM7TvMAfhCAyw0NEkROZoKqUPkby9JBHQSfO8DcLjIBm2Ve98cxinlKBncFCBxAKAiqJtOQgCc4yh0kU2bxACpPQQtYBaTsZoSd5Vy4TKaSt7lxDvKDHuYDzN0c9BwnUKBzSlCCLzAAjCjbkKdkfIS8DTpCtrtLWWgkxTiUpaWydyBAFAfIAHmAQXdK1lYICAFSls4prXYCnISSX/Q7WV6Q6nsCtxEnCADdCRiCJSF36U1HCeWOVJDATuMCqIFgMSDgFqmmCAA3gtNUMajq3CVtwt1umiIChqoLeZapC4KLyJr0Xq6RwURgKDCCtDOO9zFEGtQkLlmd7oNhxigwldagQjd5EV7AKYaGYYPQOFSqyh1Yf7v7ghsniiAnJBjCUlXn5vruNCwvVabgRJq1B0ucI5xSJc65dNqt3b9zIiNQLiBVm9N7iwsuPhCnUWGYyri5B2PZIunNwCsm8E+xb/VgAgTOhxAs5CW9gdZ3OYsGjENwSus0Bs16DvBumDxgYRXcJoh8mR8CAz44VB1MorVpM7oQPRky3B8PQZIaM51ipCJKp7ACtyIKjdIqME2MNipGo1FMS8msMwfFZD4Uk82ZHM7wdohLQlcMVBNOkSkCZI35oeg3yuhDa2T1YZYiwOtmeYAEtZLqcpCMzvKBJMdv7Qw1hOUcOEOVBMRKG2RQYbmgmG3BZZgRMksoHHHx0rqoKG19cKXL3zXQLhTBUATFdsqJQMAYkxV3s5yA8ICAxUnwSe2RLtwrB5hJg0VrdCiKxsJkJGiRBFbRFs+6oEw+VuHmAM3i5C4vAksEoCkXBUEI/AcLH4wbDG5GG3pTCGwRYHhpvQbtfJq6k3BAliCbcZ+R3wifdx5O1MUrSqYx6nW7epLxoJtC63QuvqH3epDFWpSv83YRIxQptT8FJwV2QOIaoNXYSKM2Okqnx0jSoDGSej2J43aD1r7RnIiYccIJEGsg2qTq+NzUuJRkc40V8jazbBEFlh3v07eggLAX2FymwpJpdgGLj8fmmo0p2VmuGbRjdpoqfaYly65ibHFZ6V6VYZo+Oi7ZYEEwfZReE22AOCCYClVhKgzJQbV6HcE6Zlrjvd5My1ztVHM6VVRcYvOkpnloPTedwlmPqmR6NCeIot2uc2d7UzUcoOLip5lzCxhgxnAbMGB2nSNFLCFzE8FUhnF4IJ6w7JrUsQoYibAo8qZgPiAwHxA5G7QxWFc2xCrIQBSYVD0LYawiCZ5saQzti4VpBMMJwDcs2lZKAKZUlgHtK6APxGKKNDHFGbP0dgNndWflzvTyTKrVY5zL+rIMjlStMzFHSMQcjmkzkwyagu5OMEYZEDToBx2qNQSC54Hq/6ZWO3KQ7umNpAXOJogjw/IMZbdXWQEHU0iqIWhWJGFN50AVxDwjLlfaZiCMGpw0GDG7jcT0uBFJyM7Y1MXtfjHwywdC4CxfUNV5Ar3Xp3FM+cCQwNMX/u7dEzL2LDmBhjbe/vX+1X+zxWKqpu300BO6ytCfXGGWWFZsXlcROeHDhpSBX92wxW5ufKN2YWjH5yefNjUbfzPozHCuv+1gcuusxWMmNX0w+Z40+aWDJep355tGzRp6Z+nvDpCTs/mXtw/a6ouL07fF4oW1S/3HTJG/5Pb
Oij6yjF7T9GKLb0CrvbymMuulvYt/vHx87iJLxIHsOZs1Ga8/t9o04Hms+LMv29aePD2MG97MrIxoPBc+dVK+bdBqi+tHDTc+DPtzv63DHk8Mu9i2nVnxagj5/MR+2YX0Xzd2nHn60/0rJw45a22LvFFbYGmdf3sN8se/37y5oYQee7fkwvIrg1ZWDDdPCpu//pyneUTf5ksv5D0R/qzUMuTjUa9hp+oLC1tbZz/XEbHI3xJxPfomN/jnXvHuz+5cvTNv2aJ9W67lz3lyV5va/OmfMtteqXi3PHT6hspPwofsvLomKkXbIF66NaXjYsvuGGvexEOXLW9todrK9xoynhyhrleMPF1Y57iW57eV7wc3Nu4b/4Wqsj5xgRNtbwy/czisLWGCuPhITPvgwSt2JIWtWVnuDHMpGvrdC2DUN4SqnfC5CQL2KNu7xyofSXsXqei+kRFpuhsJDg8GeBjCpWATV0Dg9D/r5Ci5LVLKRAVz7bP1YkqRNRdFxdR0fcps4E1OcCSL37bhwzmH6IJaydKU8/IDrVY+HOfD5ixfWaqUO6yeyiuTZZtx2oP7eAUvMoyvt9GyET2MKfg2Kv/QBP/HTfDJkNAf2uDvWRtcRQTaDKn9+ve8y/gvnP+9rgA6vf67XQHC/8UVQPs/egXQYYb/wyuAzvDIrwBAYyTsei1qx3QaTAcwVGeLthkxGJYkqtFqtd94BXgErSXQE1r0u7WW8x+0lsJSE3NiSujieWtsW7aPSF/hPHzxtFJl273YNeL3eGjtR0vzHca3/zJy77Ohp37hb7bMH12gyB6YdHit1d9+uvT26JOrzvWb0l4xe0FUxfCW/n94xrhzVWnsXTztaPvRi7Wa2ULSJ2fPXLizq7nV77n5xagIn6dU1zR4yHslx/Ewy0+/3NCS0PRy+PsUNmbKJf35krxRT712W3v0uu/V8AvCDr3uZAS5vWxVv5BTPu/tJdv2DNheRl+v33aoriizdt+4kPDGS+GVryDlYy3Lh0WklWm3NdGhn1lmLBwWuWfx2JsR0SPfP7AQnHrh/LWwd+4+81XHrUtE4v4a4uO014/sbGwyreCahhzb6Ds6tAQc5qyJrZc3qHefXhhxXtenYdQBz73Llidu6ZImfD0uY0zD6qunW71XhqV2mB5XH6+4de3YvZln+yfkHRwzZH3x3ciPiCnF4411mXcOlblyc2uw3zo9N+qm5iIdNTmOBHvmlpSvH+vsDA2/0ozOgeN/APtuyGc= \ No newline at end of file diff --git a/docs/cassettes/streaming_c251f809-8922-46ea-bd5b-18264fcc523a.msgpack.zlib b/docs/cassettes/streaming_c251f809-8922-46ea-bd5b-18264fcc523a.msgpack.zlib new file mode 100644 index 0000000000..f09a9858b1 --- /dev/null +++ b/docs/cassettes/streaming_c251f809-8922-46ea-bd5b-18264fcc523a.msgpack.zlib @@ -0,0 +1 @@ 
+eNrtmE9vG8cVwBv05g/QUw6bRU4Fh9wllyKXAlHIkiVZjkRZdCxLsUEMZx+5Y+7ObHZmJVGGEdTtvdii1x4ay2IjqE4CG2maNj330C+gHPJZ8pakIgk2RCA5ZvdAanfee/Pm9/6t+Gy0B7HiUrxzyoWGmDKNNyp9Norh4wSU/uNxCNqX3tFmq33veRLzs9/6WkeqUSrRiBdlBILyIpNhac8uMZ/qEv4dBTA2c9SV3vDskydmCErRPiizYXz0xGQStxIab8wVELipBoMaj+UAv7oy0QZnYLAYaGhQ4RmMamUWDDOWAWQ6iYLYfPoIn4TSgyB71I80cSQJueCZpNKZMi7oOIGnIx+oh6f805EvlU5fXvX7c8oYoDoIJj0u+uk/+oc8Khge9AJ07ASdFTCmkp4MACJCA74HxxOt9AsaRQFHD3G99FhJcTo9HNHDCN5cPsmcJ4hC6PR1C51YuF3aHCJgYdjFqlusfXFAlKZcBEiMBBT9OY7G6/++vBBRNkAjZBq89Hii/PKyjFTpi3XKWu0rJmnM/PQFjcM559Xl53EiNA8hHS1uvrnddPFiu0rRtovul1cMq6Fg6YseDRT884oy6HhImEQb6d+sl+d8AhB97afP7XLt7zGoCNMF/nCMajpRz44wFvD//42mefNp6855EL//1W+OljAu6bfb4BWMsmusUWGUrXLVsMoNq94o14yV9Xuni9Nt7mVhODM0HOgS7GVPJskxb2Cyxgp0M9E9Uv/yXkyF6mFsbp3nwYj5iRiAd7L41gz4NssAPF52HsxMAgeRVECmbqanD8jWpILI7aVXk3QjMu5TwQ/H6ZB+lkUXneDi9XQ5imVmEjcnoUIyrvtyunIO/gQPahHbIpb9zQHJCifgIUeY489pyWLcKxZeX78pobHGsLo/s6vW5PrvZZkYQvQm2/7CUtnF6z9vl/rRmpMJubXKN1flkO6FpeflUH395vrUxqeWOj04FybcS8/ex5tOveq4Nafes8qU0WrdpS6tMNazuhUL6l6l/i8MJmdoJYteJGOMLjBsUnqYnhVCepBVWrNiVytzeNZ5gwsWJB60k+6SzA6h5o0ohkBS73PWI4wyH8gkA9PR0s7GwvrtxZM2Orko5YDDn79759edDut1umGzdbhuu4LHq3Hl5s6HC6xY/SBYbm3rOdiUD3Z3N3fb24PHMtipA4arVqnb9lzVqhG7aBXtok3WPH8Yxjsd5fhVFa8UB6pTvv+x61rifnvzsAcDa1h0ltfa3v5dtz7UnX16p7LPBvFgZytkc4uH27v3wwUKC2yvtnJns7okdwPm9vE0VPvN0ryByciRb3NaIwRrhEwqpHJeIfOGN2bQLF7th/PGKvb3lgiG80Y7gwn4TUNocw3NDSng7C/IINnjXnNjbWVV1pyE8+0V7h3W6OJwdf+g5fPlrf7NjTVh7ZRtVluSSSQvQXAsh1hTDnOWUx/n4YXrP9Grrx6QyyVPWtFkkI2EVIL3esdtiLGK0hMWyMTD1h7D8eIy2VrYSV+7llvF/KoxsHvlylyX3NreGtEAk2mPpa/8StNsOE7FnDdC2qzPOZY1nmu/P86ST/S/e/evHtW0YTwxuWc2zGwIMhyBZCGJhbt+l7ZWHjgfVla39tmQ3nWGor6ws7xlFkzZfYxtZapRvBibxXHjQYFsBGpAmxfwCucz7/LII9gVkCmmVx21cMDs4QDtaI5jsmHiHKNJoLOFodIQdnroM8QRup7t3Ys6tTJ4NdqtOizb05eojIMa5zQXHhyYDauARgJNzcaT6Qw2KXYpLBSRmf1xmpt4E0MvURT9E0kQPC2YgexjV+uqyYOCiZtz5XfwYDgMp1KPnt648csheIFr2x+aOaJrERkenjZndD0j7UPOaAYjfAXPGc1gpHjOaBYjJJAjytvRz2WEYjmjWS17/BtOTmkGJfzfJ4d0PaTf5YBmZNFD8TAfbDMg3QRGE5UX28zZlr9FzkK0T0XmXI5pxpukzBHNQNT
N+9EsRDQnNGv6mzmi6xFFdD9HdD0ionJC1xPiLMi79QxGD833ckRvQTSbiam0jMxLVD5aam3cenTjxg8QdyN4 \ No newline at end of file diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 7cc8f33b36..dd4ac57cf0 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -81,12 +81,9 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( [Streaming](../concepts/streaming.md) is crucial for enhancing the responsiveness of applications built on LLMs. By displaying output progressively, even before a complete response is ready, streaming significantly improves user experience (UX), particularly when dealing with the latency of LLMs. -- [How to stream full state of your graph](stream-values.ipynb) -- [How to stream state updates of your graph](stream-updates.ipynb) +- [How to stream graph outputs](streaming/streaming.ipynb) - [How to stream LLM tokens](streaming-tokens.ipynb) - [How to stream LLM tokens without LangChain models](streaming-tokens-without-langchain.ipynb) -- [How to stream custom data](streaming-content.ipynb) -- [How to configure multiple streaming modes at the same time](stream-multiple.ipynb) - [How to stream events from within a tool](streaming-events-from-within-tools.ipynb) - [How to stream events from within a tool without LangChain models](streaming-events-from-within-tools-without-langchain.ipynb) - [How to stream events from the final node](streaming-from-final-node.ipynb) diff --git a/docs/docs/how-tos/stream-updates.ipynb b/docs/docs/how-tos/stream-updates.ipynb deleted file mode 100644 index a241b22821..0000000000 --- a/docs/docs/how-tos/stream-updates.ipynb +++ /dev/null @@ -1,186 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "3631f2b9-aa79-472e-a9d6-9125a90ee704", - "metadata": {}, - "source": [ - "# How to stream state updates of your graph" - ] - }, - { - "cell_type": "markdown", - "id": "858c7499-0c92-40a9-bd95-e5a5a5817e92", - "metadata": {}, - "source": [ - "LangGraph 
supports multiple streaming modes. The main ones are:\n", - "\n", - "- `values`: This streaming mode streams back values of the graph. This is the **full state of the graph** after each node is called.\n", - "- `updates`: This streaming mode streams back updates to the graph. This is the **update to the state of the graph** after each node is called.\n", - "\n", - "This guide covers `stream_mode=\"updates\"`." - ] - }, - { - "cell_type": "markdown", - "id": "7c2f84f1-0751-4779-97d4-5cbb286093b7", - "metadata": {}, - "source": [ - "## Setup\n", - "\n", - "First, let's install the required package and set our API keys" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "6b4285e4-7434-4971-bde0-aabceef8ee7e", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph langchain-openai langchain-community" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f7f9f24a-e3d0-422b-8924-47950b2facd6", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "\n", - "def _set_env(var: str):\n", - " if not os.environ.get(var):\n", - " os.environ[var] = getpass.getpass(f\"{var}: \")\n", - "\n", - "\n", - "_set_env(\"OPENAI_API_KEY\")" - ] - }, - { - "cell_type": "markdown", - "id": "cc6c48fe", - "metadata": {}, - "source": [ - "
\n", - "

Set up LangSmith for LangGraph development

\n", - "

\n", - " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", - "

\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "2e7777f9", - "metadata": {}, - "source": [ - "## Define the graph\n", - "\n", - "We'll be using a simple ReAct agent for this guide." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "85cf2e23-29f2-40cc-b302-5377b3b49da9", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Literal\n", - "from langchain_community.tools.tavily_search import TavilySearchResults\n", - "from langchain_core.runnables import ConfigurableField\n", - "from langchain_core.tools import tool\n", - "from langchain_openai import ChatOpenAI\n", - "from langgraph.prebuilt import create_react_agent\n", - "\n", - "\n", - "@tool\n", - "def get_weather(city: Literal[\"nyc\", \"sf\"]):\n", - " \"\"\"Use this to get weather information.\"\"\"\n", - " if city == \"nyc\":\n", - " return \"It might be cloudy in nyc\"\n", - " elif city == \"sf\":\n", - " return \"It's always sunny in sf\"\n", - " else:\n", - " raise AssertionError(\"Unknown city\")\n", - "\n", - "\n", - "tools = [get_weather]\n", - "\n", - "model = ChatOpenAI(model_name=\"gpt-4o\", temperature=0)\n", - "graph = create_react_agent(model, tools)" - ] - }, - { - "cell_type": "markdown", - "id": "956db549-5207-4be1-a823-78311738e3f8", - "metadata": {}, - "source": [ - "## Stream updates" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Receiving update from node: 'agent'\n", - "{'messages': [AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_kc6cvcEkTAUGRlSHrP4PK9fn', 'function': {'arguments': '{\"city\":\"sf\"}', 'name': 'get_weather'}, 'type': 'function'}]}, response_metadata={'token_usage': {'completion_tokens': 14, 'prompt_tokens': 57, 'total_tokens': 71}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_3e7d703517', 'finish_reason': 'tool_calls', 'logprobs': None}, 
id='run-cd68b3a0-86c3-4afa-9649-1b962a0dd062-0', tool_calls=[{'name': 'get_weather', 'args': {'city': 'sf'}, 'id': 'call_kc6cvcEkTAUGRlSHrP4PK9fn'}], usage_metadata={'input_tokens': 57, 'output_tokens': 14, 'total_tokens': 71})]}\n", - "\n", - "\n", - "\n", - "Receiving update from node: 'tools'\n", - "{'messages': [ToolMessage(content=\"It's always sunny in sf\", name='get_weather', tool_call_id='call_kc6cvcEkTAUGRlSHrP4PK9fn')]}\n", - "\n", - "\n", - "\n", - "Receiving update from node: 'agent'\n", - "{'messages': [AIMessage(content='The weather in San Francisco is currently sunny.', response_metadata={'token_usage': {'completion_tokens': 10, 'prompt_tokens': 84, 'total_tokens': 94}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_3e7d703517', 'finish_reason': 'stop', 'logprobs': None}, id='run-009d83c4-b874-4acc-9494-20aba43132b9-0', usage_metadata={'input_tokens': 84, 'output_tokens': 10, 'total_tokens': 94})]}\n", - "\n", - "\n", - "\n" - ] - } - ], - "source": [ - "inputs = {\"messages\": [(\"human\", \"what's the weather in sf\")]}\n", - "async for chunk in graph.astream(inputs, stream_mode=\"updates\"):\n", - " for node, values in chunk.items():\n", - " print(f\"Receiving update from node: '{node}'\")\n", - " print(values)\n", - " print(\"\\n\\n\")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/how-tos/stream-values.ipynb b/docs/docs/how-tos/stream-values.ipynb deleted file mode 100644 index 74a7962407..0000000000 --- a/docs/docs/how-tos/stream-values.ipynb +++ /dev/null @@ -1,248 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", 
- "id": "3631f2b9-aa79-472e-a9d6-9125a90ee704", - "metadata": {}, - "source": [ - "# How to stream full state of your graph" - ] - }, - { - "cell_type": "markdown", - "id": "858c7499-0c92-40a9-bd95-e5a5a5817e92", - "metadata": {}, - "source": [ - "LangGraph supports multiple streaming modes. The main ones are:\n", - "\n", - "- `values`: This streaming mode streams back values of the graph. This is the **full state of the graph** after each node is called.\n", - "- `updates`: This streaming mode streams back updates to the graph. This is the **update to the state of the graph** after each node is called.\n", - "\n", - "This guide covers `stream_mode=\"values\"`." - ] - }, - { - "cell_type": "markdown", - "id": "7c2f84f1-0751-4779-97d4-5cbb286093b7", - "metadata": {}, - "source": [ - "## Setup\n", - "\n", - "First, let's install the required packages and set our API keys" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "6b4285e4-7434-4971-bde0-aabceef8ee7e", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph langchain-openai langchain-community" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f7f9f24a-e3d0-422b-8924-47950b2facd6", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "\n", - "def _set_env(var: str):\n", - " if not os.environ.get(var):\n", - " os.environ[var] = getpass.getpass(f\"{var}: \")\n", - "\n", - "\n", - "_set_env(\"OPENAI_API_KEY\")" - ] - }, - { - "cell_type": "markdown", - "id": "eaaab1fc", - "metadata": {}, - "source": [ - "
\n", - "

Set up LangSmith for LangGraph development

\n", - "

\n", - " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", - "

\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "7939a3c5", - "metadata": {}, - "source": [ - "## Define the graph\n", - "\n", - "We'll be using a simple ReAct agent for this guide." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "ef5a3ec6-0cd0-4541-ab1b-d63ede22720e", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Literal\n", - "from langchain_community.tools.tavily_search import TavilySearchResults\n", - "from langchain_core.runnables import ConfigurableField\n", - "from langchain_core.tools import tool\n", - "from langchain_openai import ChatOpenAI\n", - "from langgraph.prebuilt import create_react_agent\n", - "\n", - "\n", - "@tool\n", - "def get_weather(city: Literal[\"nyc\", \"sf\"]):\n", - " \"\"\"Use this to get weather information.\"\"\"\n", - " if city == \"nyc\":\n", - " return \"It might be cloudy in nyc\"\n", - " elif city == \"sf\":\n", - " return \"It's always sunny in sf\"\n", - " else:\n", - " raise AssertionError(\"Unknown city\")\n", - "\n", - "\n", - "tools = [get_weather]\n", - "\n", - "model = ChatOpenAI(model_name=\"gpt-4o\", temperature=0)\n", - "graph = create_react_agent(model, tools)" - ] - }, - { - "cell_type": "markdown", - "id": "002a715b-e0be-4e89-8d42-f0098882586b", - "metadata": {}, - "source": [ - "## Stream values" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "e9e9ffb0-2cd5-466f-b70b-b6ed51b852d1", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "================================\u001b[1m Human Message \u001b[0m=================================\n", - "\n", - "what's the weather in sf\n", - "==================================\u001b[1m Ai Message \u001b[0m==================================\n", - "Tool Calls:\n", - " get_weather (call_61VvIzqVGtyxcXi0z6knZkjZ)\n", - " Call ID: call_61VvIzqVGtyxcXi0z6knZkjZ\n", - " Args:\n", - " city: sf\n", - "=================================\u001b[1m Tool Message 
\u001b[0m=================================\n", - "Name: get_weather\n", - "\n", - "It's always sunny in sf\n", - "==================================\u001b[1m Ai Message \u001b[0m==================================\n", - "\n", - "The weather in San Francisco is currently sunny.\n" - ] - } - ], - "source": [ - "inputs = {\"messages\": [(\"human\", \"what's the weather in sf\")]}\n", - "async for chunk in graph.astream(inputs, stream_mode=\"values\"):\n", - " chunk[\"messages\"][-1].pretty_print()" - ] - }, - { - "cell_type": "markdown", - "id": "d73de237-bf45-4fa7-93ef-6dae7eacffc0", - "metadata": {}, - "source": [ - "If we want to just get the final result, we can use the same method and just keep track of the last value we received" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "c122bf15-a489-47bf-b482-a744a54e2cc4", - "metadata": {}, - "outputs": [], - "source": [ - "inputs = {\"messages\": [(\"human\", \"what's the weather in sf\")]}\n", - "async for chunk in graph.astream(inputs, stream_mode=\"values\"):\n", - " final_result = chunk" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "316022e5-4c65-48e4-9878-8d94a2425ed4", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'messages': [HumanMessage(content=\"what's the weather in sf\", id='54b39b6f-054b-4306-980b-86905e48a6bc'),\n", - " AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_avoKnK8reERzTUSxrN9cgFxY', 'function': {'arguments': '{\"city\":\"sf\"}', 'name': 'get_weather'}, 'type': 'function'}]}, response_metadata={'token_usage': {'completion_tokens': 14, 'prompt_tokens': 57, 'total_tokens': 71}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_5e6c71d4a8', 'finish_reason': 'tool_calls', 'logprobs': None}, id='run-f2f43c89-2c96-45f4-975c-2d0f22d0d2d1-0', tool_calls=[{'name': 'get_weather', 'args': {'city': 'sf'}, 'id': 'call_avoKnK8reERzTUSxrN9cgFxY'}], usage_metadata={'input_tokens': 57, 'output_tokens': 14, 
'total_tokens': 71}),\n", - " ToolMessage(content=\"It's always sunny in sf\", name='get_weather', id='fc18a798-c7b2-4f73-84fa-8ffdffb6ddcb', tool_call_id='call_avoKnK8reERzTUSxrN9cgFxY'),\n", - " AIMessage(content='The weather in San Francisco is currently sunny. Enjoy the sunshine!', response_metadata={'token_usage': {'completion_tokens': 14, 'prompt_tokens': 84, 'total_tokens': 98}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_5e6c71d4a8', 'finish_reason': 'stop', 'logprobs': None}, id='run-21418147-da8e-4738-a076-239377397c40-0', usage_metadata={'input_tokens': 84, 'output_tokens': 14, 'total_tokens': 98})]}" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "final_result" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "0f64ebbe-535c-4b35-a95f-0a7490cfed90", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "==================================\u001b[1m Ai Message \u001b[0m==================================\n", - "\n", - "The weather in San Francisco is currently sunny. 
Enjoy the sunshine!\n" - ] - } - ], - "source": [ - "final_result[\"messages\"][-1].pretty_print()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/how-tos/streaming-content.ipynb b/docs/docs/how-tos/streaming-content.ipynb deleted file mode 100644 index cc983fb5ff..0000000000 --- a/docs/docs/how-tos/streaming-content.ipynb +++ /dev/null @@ -1,346 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "15c4bd28", - "metadata": {}, - "source": [ - "# How to stream custom data\n", - "\n", - "
\n", - "

Prerequisites

\n", - "

\n", - " This guide assumes familiarity with the following:\n", - "

\n", - "

\n", - "
\n", - "\n", - "The most common use case for streaming from inside a node is to stream LLM tokens, but you may also want to stream custom data.\n", - "\n", - "For example, if you have a long-running tool call, you can dispatch custom events between the steps and use these custom events to monitor progress. You could also surface these custom events to an end user of your application to show them how the current task is progressing.\n", - "\n", - "You can do so in two ways:\n", - "* using graph's `.stream` / `.astream` methods with `stream_mode=\"custom\"`\n", - "* emitting custom events using [adispatch_custom_events](https://python.langchain.com/docs/how_to/callbacks_custom_events/).\n", - "\n", - "Below we'll see how to use both APIs.\n", - "\n", - "## Setup\n", - "\n", - "First, let's install our required packages" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "e1a20f31", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph" - ] - }, - { - "cell_type": "markdown", - "id": "12297071", - "metadata": {}, - "source": [ - "
\n", - "

Set up LangSmith for LangGraph development

\n", - "

\n", - " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", - "

\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "29814253-ca9b-4844-a8a5-d6b19fbdbdba", - "metadata": {}, - "source": [ - "## Stream custom data using `.stream / .astream`" - ] - }, - { - "cell_type": "markdown", - "id": "b729644a-b65f-4e69-ad45-f2e88ffb4e9d", - "metadata": {}, - "source": [ - "### Define the graph" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "9731c40f-5ce7-460d-b2ad-33185529c99d", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_core.messages import AIMessage\n", - "from langgraph.graph import START, StateGraph, MessagesState, END\n", - "from langgraph.types import StreamWriter\n", - "\n", - "\n", - "async def my_node(\n", - " state: MessagesState,\n", - " writer: StreamWriter, # <-- provide StreamWriter to write chunks to be streamed\n", - "):\n", - " chunks = [\n", - " \"Four\",\n", - " \"score\",\n", - " \"and\",\n", - " \"seven\",\n", - " \"years\",\n", - " \"ago\",\n", - " \"our\",\n", - " \"fathers\",\n", - " \"...\",\n", - " ]\n", - " for chunk in chunks:\n", - " # write the chunk to be streamed using stream_mode=custom\n", - " writer(chunk)\n", - "\n", - " return {\"messages\": [AIMessage(content=\" \".join(chunks))]}\n", - "\n", - "\n", - "# Define a new graph\n", - "workflow = StateGraph(MessagesState)\n", - "\n", - "workflow.add_node(\"model\", my_node)\n", - "workflow.add_edge(START, \"model\")\n", - "workflow.add_edge(\"model\", END)\n", - "\n", - "app = workflow.compile()" - ] - }, - { - "cell_type": "markdown", - "id": "ecd69eed-9624-4640-b0af-c9f82b190900", - "metadata": {}, - "source": [ - "### Stream content" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "00a91b15-82c7-443c-acb6-a7406df15cee", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Four\n", - "score\n", - "and\n", - "seven\n", - "years\n", - "ago\n", - "our\n", - "fathers\n", - "...\n" - ] - } - ], - "source": [ - "from langchain_core.messages import 
HumanMessage\n", - "\n", - "inputs = [HumanMessage(content=\"What are you thinking about?\")]\n", - "async for chunk in app.astream({\"messages\": inputs}, stream_mode=\"custom\"):\n", - " print(chunk, flush=True)" - ] - }, - { - "cell_type": "markdown", - "id": "c7b9f1f0-c170-40dc-9c22-289483dfbc99", - "metadata": {}, - "source": [ - "You will likely need to use [multiple streaming modes](https://langchain-ai.github.io/langgraph/how-tos/stream-multiple/) as you will\n", - "want access to both the custom data and the state updates." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "f8ed22d4-6ce6-4b04-a68b-2ea516e3ab15", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "('custom', 'Four')\n", - "('custom', 'score')\n", - "('custom', 'and')\n", - "('custom', 'seven')\n", - "('custom', 'years')\n", - "('custom', 'ago')\n", - "('custom', 'our')\n", - "('custom', 'fathers')\n", - "('custom', '...')\n", - "('updates', {'model': {'messages': [AIMessage(content='Four score and seven years ago our fathers ...', additional_kwargs={}, response_metadata={})]}})\n" - ] - } - ], - "source": [ - "from langchain_core.messages import HumanMessage\n", - "\n", - "inputs = [HumanMessage(content=\"What are you thinking about?\")]\n", - "async for chunk in app.astream({\"messages\": inputs}, stream_mode=[\"custom\", \"updates\"]):\n", - " print(chunk, flush=True)" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "ca976d6a-7c64-4603-8bb4-dee95428c33d", - "metadata": {}, - "source": [ - "## Stream custom data using `.astream_events`\n", - "\n", - "If you are already using graph's `.astream_events` method in your workflow, you can also stream custom data by emitting custom events using `adispatch_custom_event`\n", - "\n", - "
\n", - "

ASYNC IN PYTHON<=3.10

\n", - "

\n", - "\n", - "LangChain cannot automatically propagate configuration, including callbacks necessary for `astream_events()`, to child runnables if you are running async code in python<=3.10. This is a common reason why you may fail to see events being emitted from custom runnables or tools.\n", - "\n", - "If you are running python<=3.10, you will need to manually propagate the `RunnableConfig` object to the child runnable in async environments. For an example of how to manually propagate the config, see the implementation of the node below with `adispatch_custom_event`.\n", - "\n", - "If you are running python>=3.11, the `RunnableConfig` will automatically propagate to child runnables in async environment. However, it is still a good idea to propagate the `RunnableConfig` manually if your code may run in other Python versions.\n", - "

\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "b390a9fe-2d5f-4e82-a1ea-c7c0186b8559", - "metadata": {}, - "source": [ - "### Define the graph" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "486a01a0", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_core.runnables import RunnableConfig, RunnableLambda\n", - "from langchain_core.callbacks.manager import adispatch_custom_event\n", - "\n", - "\n", - "async def my_node(state: MessagesState, config: RunnableConfig):\n", - " chunks = [\n", - " \"Four\",\n", - " \"score\",\n", - " \"and\",\n", - " \"seven\",\n", - " \"years\",\n", - " \"ago\",\n", - " \"our\",\n", - " \"fathers\",\n", - " \"...\",\n", - " ]\n", - " for chunk in chunks:\n", - " await adispatch_custom_event(\n", - " \"my_custom_event\",\n", - " {\"chunk\": chunk},\n", - " config=config, # <-- propagate config\n", - " )\n", - "\n", - " return {\"messages\": [AIMessage(content=\" \".join(chunks))]}\n", - "\n", - "\n", - "# Define a new graph\n", - "workflow = StateGraph(MessagesState)\n", - "\n", - "workflow.add_node(\"model\", my_node)\n", - "workflow.add_edge(START, \"model\")\n", - "workflow.add_edge(\"model\", END)\n", - "\n", - "app = workflow.compile()" - ] - }, - { - "cell_type": "markdown", - "id": "7dcded03-6776-405e-afae-005a3212d3e4", - "metadata": {}, - "source": [ - "### Stream content" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "ce773a40", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Four|score|and|seven|years|ago|our|fathers|...|" - ] - } - ], - "source": [ - "from langchain_core.messages import HumanMessage\n", - "\n", - "inputs = [HumanMessage(content=\"What are you thinking about?\")]\n", - "async for event in app.astream_events({\"messages\": inputs}, version=\"v2\"):\n", - " tags = event.get(\"tags\", [])\n", - " if event[\"event\"] == \"on_custom_event\" and event[\"name\"] == \"my_custom_event\":\n", - " data = 
event[\"data\"]\n", - " if data:\n", - " print(data[\"chunk\"], end=\"|\", flush=True)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/how-tos/streaming/streaming.ipynb b/docs/docs/how-tos/streaming/streaming.ipynb new file mode 100644 index 0000000000..4327f47dda --- /dev/null +++ b/docs/docs/how-tos/streaming/streaming.ipynb @@ -0,0 +1,507 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "76c4b04f-0c03-4321-9d40-38d12c59d088", + "metadata": {}, + "source": [ + "# How to stream graph outputs" + ] + }, + { + "cell_type": "markdown", + "id": "15403cdb-441d-43af-a29f-fc15abe03dcc", + "metadata": {}, + "source": [ + "!!! info \"Prerequisites\"\n", + "\n", + " This guide assumes familiarity with the following:\n", + " \n", + " - [Streaming](../../concepts/streaming/)\n", + " - [Chat Models](https://python.langchain.com/docs/concepts/chat_models/)\n", + "\n", + "Streaming is crucial for enhancing the responsiveness of applications built on LLMs. By displaying output progressively, even before a complete response is ready, streaming significantly improves user experience (UX), particularly when dealing with the latency of LLMs.\n", + "\n", + "LangGraph is built with first class support for streaming. There are several different ways to stream back outputs from a graph run:\n", + "\n", + "- `\"values\"`: Emit all values in the state after each step.\n", + "- `\"updates\"`: Emit only the node name(s) and updates returned by the node(s) after each step.\n", + " If multiple updates are made in the same step (e.g. 
multiple nodes are run) then those updates are emitted separately.\n",
+    "- `\"custom\"`: Emit custom data from inside graph nodes via the `writer: StreamWriter` kwarg of each node.\n",
+    "- `\"messages\"`: Emit LLM messages token-by-token together with metadata for the graph node where LLM is invoked.\n",
+    "- `\"debug\"`: Emit debug events with as much information as possible for each step.\n",
+    "\n",
+    "You can stream outputs from the graph by using `.stream()` / `.astream()` methods:\n",
+    "\n",
+    "=== \"Sync\"\n",
+    "\n",
+    "    ```python\n",
+    "    for chunk in graph.stream(inputs, stream_mode=\"updates\"):\n",
+    "        print(chunk)\n",
+    "    ```\n",
+    "\n",
+    "=== \"Async\"\n",
+    "\n",
+    "    ```python\n",
+    "    async for chunk in graph.astream(inputs, stream_mode=\"updates\"):\n",
+    "        print(chunk)\n",
+    "    ```\n",
+    "\n",
+    "You can also combine multiple streaming modes by providing a list to the `stream_mode` parameter:\n",
+    "\n",
+    "=== \"Sync\"\n",
+    "\n",
+    "    ```python\n",
+    "    for chunk in graph.stream(inputs, stream_mode=[\"updates\", \"custom\"]):\n",
+    "        print(chunk)\n",
+    "    ```\n",
+    "\n",
+    "=== \"Async\"\n",
+    "\n",
+    "    ```python\n",
+    "    async for chunk in graph.astream(inputs, stream_mode=[\"updates\", \"custom\"]):\n",
+    "        print(chunk)\n",
+    "    ```"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9723cf76-6fe4-4b52-829f-3f28712ddcb7",
+   "metadata": {},
+   "source": [
+    "## Setup"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "427f8f66-7404-4c7d-a642-af5053b8b28f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%%capture --no-stderr\n",
+    "%pip install --quiet -U langgraph langchain_openai"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "03310ce6-e21f-4378-93bf-dd273fdb3e9a",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdin",
+     "output_type": "stream",
+     "text": [
+      "OPENAI_API_KEY: ········\n"
+     ]
+    }
+   ],
+   "source": [
+    "import getpass\n",
+    "import os\n",
+    "\n",
+    "\n",
+    "def _set_env(var: str):\n",
+    "    if 
not os.environ.get(var):\n", + " os.environ[var] = getpass.getpass(f\"{var}: \")\n", + "\n", + "\n", + "_set_env(\"OPENAI_API_KEY\")" + ] + }, + { + "cell_type": "markdown", + "id": "80399508-bad8-43b7-8ec9-4c06ad1774cc", + "metadata": {}, + "source": [ + "
\n", + "

Set up LangSmith for LangGraph development

\n", + "

\n", + " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", + "

\n", + "
" + ] + }, + { + "cell_type": "markdown", + "id": "be4adbb2-61e8-4bb7-942d-b4dc27ba71ac", + "metadata": {}, + "source": [ + "Let's define a simple graph with two nodes:" + ] + }, + { + "cell_type": "markdown", + "id": "f6d4c513-1006-4179-bba9-d858fc952169", + "metadata": {}, + "source": [ + "## Define graph" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "faeb5ce8-d383-4277-b0a8-322e713638e4", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import TypedDict\n", + "from langgraph.graph import StateGraph, START\n", + "\n", + "\n", + "class State(TypedDict):\n", + " topic: str\n", + " joke: str\n", + "\n", + "\n", + "def refine_topic(state: State):\n", + " return {\"topic\": state[\"topic\"] + \" and cats\"}\n", + "\n", + "\n", + "def generate_joke(state: State):\n", + " return {\"joke\": f\"This is a joke about {state['topic']}\"}\n", + "\n", + "\n", + "graph = (\n", + " StateGraph(State)\n", + " .add_node(refine_topic)\n", + " .add_node(generate_joke)\n", + " .add_edge(START, \"refine_topic\")\n", + " .add_edge(\"refine_topic\", \"generate_joke\")\n", + " .compile()\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "f9b90850-85bf-4391-b6b7-22ad45edaa3b", + "metadata": {}, + "source": [ + "## stream_mode=\"values\"" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "3daca06a-369b-41e5-8e4e-6edc4d4af3a7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'topic': 'ice cream'}\n", + "{'topic': 'ice cream and cats'}\n", + "{'topic': 'ice cream and cats', 'joke': 'This is a joke about ice cream and cats'}\n" + ] + } + ], + "source": [ + "for chunk in graph.stream(\n", + " {\"topic\": \"ice cream\"},\n", + " # highlight-next-line\n", + " stream_mode=\"values\",\n", + "):\n", + " print(chunk)" + ] + }, + { + "cell_type": "markdown", + "id": "adcb1bdb-f9fa-4d42-87ce-8e25d4290883", + "metadata": {}, + "source": [ + "## stream_mode=\"updates\"" + ] + }, + { + 
"cell_type": "code", + "execution_count": 5, + "id": "eed7d401-37d1-4d15-b6dd-88956fff89e1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'refine_topic': {'topic': 'ice cream and cats'}}\n", + "{'generate_joke': {'joke': 'This is a joke about ice cream and cats'}}\n" + ] + } + ], + "source": [ + "for chunk in graph.stream(\n", + " {\"topic\": \"ice cream\"},\n", + " # highlight-next-line\n", + " stream_mode=\"updates\",\n", + "):\n", + " print(chunk)" + ] + }, + { + "cell_type": "markdown", + "id": "b9ed9c68-b7c5-4420-945d-84fa33fcf88f", + "metadata": {}, + "source": [ + "## stream_mode=\"debug\"" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "cc6354f6-0c39-49cf-a529-b9c6c8713d7c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'type': 'task', 'timestamp': '2025-01-28T22:06:34.789803+00:00', 'step': 1, 'payload': {'id': 'eb305d74-3460-9510-d516-beed71a63414', 'name': 'refine_topic', 'input': {'topic': 'ice cream'}, 'triggers': ['start:refine_topic']}}\n", + "{'type': 'task_result', 'timestamp': '2025-01-28T22:06:34.790013+00:00', 'step': 1, 'payload': {'id': 'eb305d74-3460-9510-d516-beed71a63414', 'name': 'refine_topic', 'error': None, 'result': [('topic', 'ice cream and cats')], 'interrupts': []}}\n", + "{'type': 'task', 'timestamp': '2025-01-28T22:06:34.790165+00:00', 'step': 2, 'payload': {'id': '74355cb8-6284-25e0-579f-430493c1bdab', 'name': 'generate_joke', 'input': {'topic': 'ice cream and cats'}, 'triggers': ['refine_topic']}}\n", + "{'type': 'task_result', 'timestamp': '2025-01-28T22:06:34.790337+00:00', 'step': 2, 'payload': {'id': '74355cb8-6284-25e0-579f-430493c1bdab', 'name': 'generate_joke', 'error': None, 'result': [('joke', 'This is a joke about ice cream and cats')], 'interrupts': []}}\n" + ] + } + ], + "source": [ + "for chunk in graph.stream(\n", + " {\"topic\": \"ice cream\"},\n", + " # highlight-next-line\n", + " 
stream_mode=\"debug\",\n", + "):\n", + " print(chunk)" + ] + }, + { + "cell_type": "markdown", + "id": "6791da60-0513-43e6-b445-788dd81683bb", + "metadata": {}, + "source": [ + "## stream_mode=\"messages\"" + ] + }, + { + "cell_type": "markdown", + "id": "1f45d68b-f7ca-4012-96cc-d276a143f571", + "metadata": {}, + "source": [ + "Let's modify the above example to include LLM calls:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "efa787e1-be4d-433b-a1af-46a9c99ad8f3", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(model=\"gpt-4o-mini\")\n", + "\n", + "\n", + "def generate_joke(state: State):\n", + " # highlight-next-line\n", + " llm_response = llm.invoke(\n", + " # highlight-next-line\n", + " [\n", + " # highlight-next-line\n", + " {\"role\": \"user\", \"content\": f\"Generate a joke about {state['topic']}\"}\n", + " # highlight-next-line\n", + " ]\n", + " # highlight-next-line\n", + " )\n", + " return {\"joke\": llm_response.content}\n", + "\n", + "\n", + "graph = (\n", + " StateGraph(State)\n", + " .add_node(refine_topic)\n", + " .add_node(generate_joke)\n", + " .add_edge(START, \"refine_topic\")\n", + " .add_edge(\"refine_topic\", \"generate_joke\")\n", + " .compile()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "c251f809-8922-46ea-bd5b-18264fcc523a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Why| did| the| cat| sit| on| the| ice| cream| cone|?\n", + "\n", + "|Because| it| wanted| to| be| a| \"|p|urr|-f|ect|\"| scoop|!| 🍦|🐱|" + ] + } + ], + "source": [ + "for message_chunk, metadata in graph.stream(\n", + " {\"topic\": \"ice cream\"},\n", + " # highlight-next-line\n", + " stream_mode=\"messages\",\n", + "):\n", + " if message_chunk.content:\n", + " print(message_chunk.content, end=\"|\", flush=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": 
"b1912d72-7b68-4810-8b98-d7f3c35fbb6d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'langgraph_step': 2,\n", + " 'langgraph_node': 'generate_joke',\n", + " 'langgraph_triggers': ['refine_topic'],\n", + " 'langgraph_path': ('__pregel_pull', 'generate_joke'),\n", + " 'langgraph_checkpoint_ns': 'generate_joke:568879bc-8800-2b0d-a5b5-059526a4bebf',\n", + " 'checkpoint_ns': 'generate_joke:568879bc-8800-2b0d-a5b5-059526a4bebf',\n", + " 'ls_provider': 'openai',\n", + " 'ls_model_name': 'gpt-4o-mini',\n", + " 'ls_model_type': 'chat',\n", + " 'ls_temperature': 0.7}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "metadata" + ] + }, + { + "cell_type": "markdown", + "id": "0d1ebeda-4498-40e0-a30a-0844cb491425", + "metadata": {}, + "source": [ + "## stream_mode=\"custom\"" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "e3bf6a2b-afe3-4bd3-8474-57cccd994f23", + "metadata": {}, + "outputs": [], + "source": [ + "from langgraph.types import StreamWriter\n", + "\n", + "\n", + "# highlight-next-line\n", + "def generate_joke(state: State, writer: StreamWriter):\n", + " # highlight-next-line\n", + " writer({\"custom_key\": \"Writing custom data while generating a joke\"})\n", + " return {\"joke\": f\"This is a joke about {state['topic']}\"}\n", + "\n", + "\n", + "graph = (\n", + " StateGraph(State)\n", + " .add_node(refine_topic)\n", + " .add_node(generate_joke)\n", + " .add_edge(START, \"refine_topic\")\n", + " .add_edge(\"refine_topic\", \"generate_joke\")\n", + " .compile()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "2ecfb0b0-3311-46f5-9dc8-6c7853373792", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'custom_key': 'Writing custom data while generating a joke'}\n" + ] + } + ], + "source": [ + "for chunk in graph.stream(\n", + " {\"topic\": \"ice cream\"},\n", + " # 
highlight-next-line\n", + " stream_mode=\"custom\",\n", + "):\n", + " print(chunk)" + ] + }, + { + "cell_type": "markdown", + "id": "28e67f4d-fcab-46a8-93e2-b7bee30336c1", + "metadata": {}, + "source": [ + "## Multiple streaming modes" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "bf4cab4b-356c-4276-9035-26974abe1efe", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Stream mode: updates\n", + "{'refine_topic': {'topic': 'ice cream and cats'}}\n", + "\n", + "\n", + "Stream mode: custom\n", + "{'custom_key': 'Writing custom data while generating a joke'}\n", + "\n", + "\n", + "Stream mode: updates\n", + "{'generate_joke': {'joke': 'This is a joke about ice cream and cats'}}\n", + "\n", + "\n" + ] + } + ], + "source": [ + "for stream_mode, chunk in graph.stream(\n", + " {\"topic\": \"ice cream\"},\n", + " # highlight-next-line\n", + " stream_mode=[\"updates\", \"custom\"],\n", + "):\n", + " print(f\"Stream mode: {stream_mode}\")\n", + " print(chunk)\n", + " print(\"\\n\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/how-tos/streaming/streaming.md b/docs/docs/how-tos/streaming/streaming.md new file mode 100644 index 0000000000..57ba5d7276 --- /dev/null +++ b/docs/docs/how-tos/streaming/streaming.md @@ -0,0 +1,2 @@ +WARNING: DO NOT MODIFY/DELETE +This is a dummy file needed for mkdocs-redirects, as it is expecting redirects to be markdown files diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 93451afbe2..4bcb4199ab 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -58,6 +58,12 
@@ plugins: - autorefs - redirects: redirect_maps: + # lib redirects + 'how-tos/stream-values.md': 'how-tos/streaming/streaming.md' + 'how-tos/stream-updates.md': 'how-tos/streaming/streaming.md' + 'how-tos/streaming-content.md': 'how-tos/streaming/streaming.md' + 'how-tos/stream-multiple.md': 'how-tos/streaming/streaming.md' + # cloud redirects 'cloud/index.md': 'concepts/index.md#langgraph-platform' 'cloud/how-tos/index.md': 'how-tos/index.md#langgraph-platform' 'cloud/concepts/api.md': 'concepts/langgraph_server.md' @@ -138,7 +144,7 @@ nav: - how-tos/review-tool-calls-functional.ipynb - Streaming: - Streaming: how-tos#streaming - - how-tos/stream-values.ipynb + - how-tos/streaming/streaming.ipynb - how-tos/stream-updates.ipynb - how-tos/streaming-tokens.ipynb - how-tos/streaming-tokens-without-langchain.ipynb From 5c599c943d82b268fb404dc816f57e9a90b4fd9e Mon Sep 17 00:00:00 2001 From: vbarda Date: Tue, 28 Jan 2025 21:44:01 -0500 Subject: [PATCH 02/14] update streaming tokens --- ...ns-without-langchain_d6ed3df5.msgpack.zlib | 1 - docs/docs/how-tos/index.md | 1 - .../streaming-tokens-without-langchain.ipynb | 355 ----------- docs/docs/how-tos/streaming-tokens.ipynb | 549 ++++++++++-------- docs/mkdocs.yml | 4 - 5 files changed, 314 insertions(+), 596 deletions(-) delete mode 100644 docs/cassettes/streaming-tokens-without-langchain_d6ed3df5.msgpack.zlib delete mode 100644 docs/docs/how-tos/streaming-tokens-without-langchain.ipynb diff --git a/docs/cassettes/streaming-tokens-without-langchain_d6ed3df5.msgpack.zlib b/docs/cassettes/streaming-tokens-without-langchain_d6ed3df5.msgpack.zlib deleted file mode 100644 index 835e4a8930..0000000000 --- a/docs/cassettes/streaming-tokens-without-langchain_d6ed3df5.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNrtGUtv48Y52566pxZo7wRRoEAhyqRIPQ0fbMn2yrt+wLLX6yaGMBoOHzbF4c4MtZIXOnTbe8Ggf6C7jpwYziZBgjZJm557KHr3HvIj8gv6DSXFcuzEmyYBspV1oEjN937z05PjDmHcp+GtUz8UhCEs4IG/+eSYkYcx4eKPgzYRHrWPNtYbW89i5p/91hMi4pWZGRT5WRqREPlZTNszHWMGe0jMwH0UkJTMUYvavRe33Mdqm3COXMLVivL6Y5XRgMCdGnPC1IyiYgq8QyF/egQkfsMVP1SER5QWsRmlbbW/B1BtapNAwriR0MxsXhMxa1GJzwUjqA1HgsUEngWlwYiV6EUpKycOU9Uk+Jf3FeWxGqJ2CuAS0fQFaXMJYROOmR+NgNRtTkAcnyuSMFyUgNIDJY6UR56PPSVFUxAjY7Fdv0NCJQoQJllJLkIMuIB5ecpyLBNt7RMsUgAGhmTCJ0OAFPMCKGjoh67a7wOw9IzPiC0VHIHu9fv9vf6xR5ANTP505FEukucXHfQewpiA5UiIqQ3EknfdQz/KKDZxAiTICTghJKldkpMDQiINBaDGYIiVvI+iKPAxkucz+5yGpyOnaVLEy8cn0rcauDwUySfzvBfidZBkvj6z0YNwChUja5Wy+vtdjQvkhwGEhxYgEGoQped/nzyIED4AStooVJPBEPn5JAzlyVurCK83LpBEDHvJW4i1C9aHk7+zOBR+myTH1Y3L7EaH5+zMrGFkyx9cICw1St5Nvyrp1ad/u0CECNbTMAVayV/052NjBSR0hZc8M/PltxnhESQJ+cMA0ETMnxyBY8i//3U8Span63fHHv38tV8d1cBJyWdbXpxRcgWlQSIlp+csxTArllkx8sry6tZpdcRmS/rkTBGkK2ZIR/4yTJFZBVKUcSLmYuFopQ+2GAq5A45aHAfFMfbi8IDYJ9Urw+EzGQ6gntQHslgj3Yhyoo3ETE4faJvDuqHVax8OY0+jzEWhf5jGRvKO9DII4YcfjY4h9iVJYK61efIslys+H52MHXACiuqaoWu68YlMBAxBJwWPKAPFCIaqJHrJWaaNujLi5kwjbxZ0XZ+FfMRBbJNG3KrRNvDks0rESECR/WlXY2DQwG/74JX0Oqp4EEgGIOsfX4YQ9ICEPHk7rw8//5wEYURykGp8SeioDJ9/XA00pmVJmHKx+OlFMPDROZ1nhTb/+PL5iMRTnZ92x8Cabydnv4aHpmWUcpbj6LlSLt8q6wWr6GAzT3KlfLlgO8R5r7qkVRH2iNZIwy85ru2uza/WqycNoF2FCueTN1/c+mmziZ1mqz3nke3OqhEQp1gnnttZXojcneWGvl274xobJcMvlPDd6gG3XWxpRjFXNPMlo5zXjKyeNbKGZtSybYf/jt25t2xhpyCMewtWeTdvrWwtrN912T1DdBa28Wprt75d3L+/3Is7yNMPV3jAgm6D7fv8gRPmmdBrZri+E7RaKw8pfuSCP5Hw5mZmFYhEKIp8bpQgGiSIJtPDqhjj9JhV7DQK5rIXK+Oscgda2noY9GYhryCcCHxDwW5AYZ9boyE5+zPYIO749hxb3Cmaufwhqx/YOOKocUfvGvWdovXQz+srC5F1v5WrGfWDKFicMEIhZ2r6yA4F3SqlwXMu+v8o1V8faJP5rq2n/Qr8GFIe+o4zaBAGKZSc4IDGNhR5Rgbg88353eSjEi6biJQcGRi6g/La4s5m2q1/Pxi2mhc/e2IjgWQP8qHTqLK1Y2js2vzCvnnomGtLW5tBo2Vt3FsiS/nyZjfnL94vqZlxUxtiZM+HgWxaWAAAQyESsnud2ycz7u0XWzvkey4PGLzHocM2HRCLsAikA/JhHARAy6M+lk0TOr0f2qSrVvQMdO9AILUynjNUBNUFSiygZc5HjSEB2dCbGAXBV2kMlYaDZq26mbd7NXelt1RFxvbq5vKCvnBY2wViw+48MVtMjBbjyWJysFARc+M2sAduKjTzvQw0cyfmKBjK08+
oAXWhILb4WEBQ2udeE2zGJdUUaq9/+/ar75yvNf2kDScN9viN1GY3RvomIw3nwRszXWOmN9TKTTRdbyZ4/7ox0nVGGr6h3ljpuozrT5uRrtd10owTOr9eW19b3Lt9+/vcwvzkne++hcko51gTQ9UFVHW0fxkHh+QkPahcN029zF5mIqAUOQ+k3S6t5SMZ0yiTEnx17SMjT5kI2+ZLyjRWVyLK5yvFmlCfU3zAMwr3KOEKCm3FjrlQWnEYyuXOzR7r2+yxPn/t5zebrB/ZJmuA08VAcvbFj3wv8AO8sV/a4hVKxrfb4v3ymi1e4f9ki/e0XJjCJV6h9L0v8VoFExMzVyIFU7dIkeAiKkFCWUbZcFp6yfraJd53Xw6VTcPWi1cvh37xn28YzvhaDuWKG739Bw3WoW59YeX+fuPh1v4rtByCKeaH2My8IpY5N0MdBqfp1FzOP1Or+/nAP5X6Z6bW8T0aT63uHupMb8anr603+T51bpdrihu3T5vbUWhPre5yHze9g920ah6HU/sm409vhc9OleIv8XcTFzS66o+m/wIqRUf9 \ No newline at end of file diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index dd4ac57cf0..9f649ccbbc 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -83,7 +83,6 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( - [How to stream graph outputs](streaming/streaming.ipynb) - [How to stream LLM tokens](streaming-tokens.ipynb) -- [How to stream LLM tokens without LangChain models](streaming-tokens-without-langchain.ipynb) - [How to stream events from within a tool](streaming-events-from-within-tools.ipynb) - [How to stream events from within a tool without LangChain models](streaming-events-from-within-tools-without-langchain.ipynb) - [How to stream events from the final node](streaming-from-final-node.ipynb) diff --git a/docs/docs/how-tos/streaming-tokens-without-langchain.ipynb b/docs/docs/how-tos/streaming-tokens-without-langchain.ipynb deleted file mode 100644 index 697def4e78..0000000000 --- a/docs/docs/how-tos/streaming-tokens-without-langchain.ipynb +++ /dev/null @@ -1,355 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "b23ced4e-dc29-43be-9f94-0c36bb181b8a", - "metadata": {}, - "source": [ - "# How to stream LLM tokens (without LangChain LLMs)" - ] - }, - { - "cell_type": "markdown", - "id": "7044eeb8-4074-4f9c-8a62-962488744557", - "metadata": {}, - "source": [ - "In this example we will stream tokens from the 
language model powering an agent. We'll be using OpenAI client library directly, without using LangChain chat models. We will also use a ReAct agent as an example." - ] - }, - { - "cell_type": "markdown", - "id": "a37f60af-43ea-4aa6-847a-df8cc47065f5", - "metadata": {}, - "source": [ - "## Setup\n", - "\n", - "First, let's install the required packages and set our API keys" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "47f79af8-58d8-4a48-8d9a-88823d88701f", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph openai" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0cf6b41d-7fcb-40b6-9a72-229cdd00a094", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "\n", - "def _set_env(var: str):\n", - " if not os.environ.get(var):\n", - " os.environ[var] = getpass.getpass(f\"{var}: \")\n", - "\n", - "\n", - "_set_env(\"OPENAI_API_KEY\")" - ] - }, - { - "cell_type": "markdown", - "id": "1c5bc618", - "metadata": {}, - "source": [ - "
\n", - "

Set up LangSmith for LangGraph development

\n", - "

\n", - " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", - "

\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "e3d02ebb-c2e1-4ef7-b187-810d55139317", - "metadata": {}, - "source": [ - "## Define model, tools and graph" - ] - }, - { - "cell_type": "markdown", - "id": "3ba684f1-d46b-42e4-95cf-9685209a5992", - "metadata": {}, - "source": [ - "### Define a node that will call OpenAI API" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "d59234f9-173e-469d-a725-c13e0979663e", - "metadata": {}, - "outputs": [], - "source": [ - "from openai import AsyncOpenAI\n", - "from langchain_core.language_models.chat_models import ChatGenerationChunk\n", - "from langchain_core.messages import AIMessageChunk\n", - "from langchain_core.runnables.config import (\n", - " ensure_config,\n", - " get_callback_manager_for_config,\n", - ")\n", - "\n", - "openai_client = AsyncOpenAI()\n", - "# define tool schema for openai tool calling\n", - "\n", - "tool = {\n", - " \"type\": \"function\",\n", - " \"function\": {\n", - " \"name\": \"get_items\",\n", - " \"description\": \"Use this tool to look up which items are in the given place.\",\n", - " \"parameters\": {\n", - " \"type\": \"object\",\n", - " \"properties\": {\"place\": {\"type\": \"string\"}},\n", - " \"required\": [\"place\"],\n", - " },\n", - " },\n", - "}\n", - "\n", - "\n", - "async def call_model(state, config=None):\n", - " config = ensure_config(config | {\"tags\": [\"agent_llm\"]})\n", - " callback_manager = get_callback_manager_for_config(config)\n", - " messages = state[\"messages\"]\n", - "\n", - " llm_run_manager = callback_manager.on_chat_model_start({}, [messages])[0]\n", - " response = await openai_client.chat.completions.create(\n", - " messages=messages, model=\"gpt-3.5-turbo\", tools=[tool], stream=True\n", - " )\n", - "\n", - " response_content = \"\"\n", - " role = None\n", - "\n", - " tool_call_id = None\n", - " tool_call_function_name = None\n", - " tool_call_function_arguments = \"\"\n", - " async for chunk in response:\n", - " delta = 
chunk.choices[0].delta\n", - " if delta.role is not None:\n", - " role = delta.role\n", - "\n", - " if delta.content:\n", - " response_content += delta.content\n", - " # note: we're wrapping the response in ChatGenerationChunk so that we can stream this back using stream_mode=\"messages\"\n", - " chunk = ChatGenerationChunk(\n", - " message=AIMessageChunk(\n", - " content=delta.content,\n", - " )\n", - " )\n", - " llm_run_manager.on_llm_new_token(delta.content, chunk=chunk)\n", - "\n", - " if delta.tool_calls:\n", - " # note: for simplicity we're only handling a single tool call here\n", - " if delta.tool_calls[0].function.name is not None:\n", - " tool_call_function_name = delta.tool_calls[0].function.name\n", - " tool_call_id = delta.tool_calls[0].id\n", - "\n", - " # note: we're wrapping the tools calls in ChatGenerationChunk so that we can stream this back using stream_mode=\"messages\"\n", - " tool_call_chunk = ChatGenerationChunk(\n", - " message=AIMessageChunk(\n", - " content=\"\",\n", - " additional_kwargs={\"tool_calls\": [delta.tool_calls[0].dict()]},\n", - " )\n", - " )\n", - " llm_run_manager.on_llm_new_token(\"\", chunk=tool_call_chunk)\n", - " tool_call_function_arguments += delta.tool_calls[0].function.arguments\n", - "\n", - " if tool_call_function_name is not None:\n", - " tool_calls = [\n", - " {\n", - " \"id\": tool_call_id,\n", - " \"function\": {\n", - " \"name\": tool_call_function_name,\n", - " \"arguments\": tool_call_function_arguments,\n", - " },\n", - " \"type\": \"function\",\n", - " }\n", - " ]\n", - " else:\n", - " tool_calls = None\n", - "\n", - " response_message = {\n", - " \"role\": role,\n", - " \"content\": response_content,\n", - " \"tool_calls\": tool_calls,\n", - " }\n", - " return {\"messages\": [response_message]}" - ] - }, - { - "cell_type": "markdown", - "id": "3a3877e8-8ace-40d5-ad04-cbf21c6f3250", - "metadata": {}, - "source": [ - "### Define our tools and a tool-calling node" - ] - }, - { - "cell_type": "code", - 
"execution_count": 8, - "id": "b756ea32", - "metadata": {}, - "outputs": [], - "source": [ - "import json\n", - "\n", - "\n", - "async def get_items(place: str) -> str:\n", - " \"\"\"Use this tool to look up which items are in the given place.\"\"\"\n", - " if \"bed\" in place: # For under the bed\n", - " return \"socks, shoes and dust bunnies\"\n", - " if \"shelf\" in place: # For 'shelf'\n", - " return \"books, penciles and pictures\"\n", - " else: # if the agent decides to ask about a different place\n", - " return \"cat snacks\"\n", - "\n", - "\n", - "# define mapping to look up functions when running tools\n", - "function_name_to_function = {\"get_items\": get_items}\n", - "\n", - "\n", - "async def call_tools(state):\n", - " messages = state[\"messages\"]\n", - "\n", - " tool_call = messages[-1][\"tool_calls\"][0]\n", - " function_name = tool_call[\"function\"][\"name\"]\n", - " function_arguments = tool_call[\"function\"][\"arguments\"]\n", - " arguments = json.loads(function_arguments)\n", - "\n", - " function_response = await function_name_to_function[function_name](**arguments)\n", - " tool_message = {\n", - " \"tool_call_id\": tool_call[\"id\"],\n", - " \"role\": \"tool\",\n", - " \"name\": function_name,\n", - " \"content\": function_response,\n", - " }\n", - " return {\"messages\": [tool_message]}" - ] - }, - { - "cell_type": "markdown", - "id": "6685898c-9a1c-4803-a492-bd70574ebe38", - "metadata": {}, - "source": [ - "### Define our graph" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "228260be-1f9a-4195-80e0-9604f8a5dba6", - "metadata": {}, - "outputs": [], - "source": [ - "import operator\n", - "from typing import Annotated, Literal\n", - "from typing_extensions import TypedDict\n", - "\n", - "from langgraph.graph import StateGraph, END, START\n", - "\n", - "\n", - "class State(TypedDict):\n", - " messages: Annotated[list, operator.add]\n", - "\n", - "\n", - "def should_continue(state) -> Literal[\"tools\", END]:\n", - " 
messages = state[\"messages\"]\n", - " last_message = messages[-1]\n", - " if last_message[\"tool_calls\"]:\n", - " return \"tools\"\n", - " return END\n", - "\n", - "\n", - "workflow = StateGraph(State)\n", - "workflow.add_edge(START, \"model\")\n", - "workflow.add_node(\"model\", call_model) # i.e. our \"agent\"\n", - "workflow.add_node(\"tools\", call_tools)\n", - "workflow.add_conditional_edges(\"model\", should_continue)\n", - "workflow.add_edge(\"tools\", \"model\")\n", - "graph = workflow.compile()" - ] - }, - { - "cell_type": "markdown", - "id": "d046e2ef-f208-4831-ab31-203b2e75a49a", - "metadata": {}, - "source": [ - "## Stream tokens" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "d6ed3df5", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[{'name': 'get_items', 'args': {}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "[{'name': 'get_items', 'args': {}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "[{'name': 'get_items', 'args': {}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "[{'name': 'get_items', 'args': {'place': ''}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "[{'name': 'get_items', 'args': {'place': 'bed'}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "[{'name': 'get_items', 'args': {'place': 'bedroom'}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "[{'name': 'get_items', 'args': {'place': 'bedroom'}, 'id': 'call_h7g3jsgeRXIOUiaEC0VtM4EI', 'type': 'tool_call'}]\n", - "In| the| bedroom|,| you| have| socks|,| shoes|,| and| some| dust| b|unn|ies|.|" - ] - } - ], - "source": [ - "from langchain_core.messages import AIMessageChunk\n", - "\n", - "first = True\n", - "async for msg, metadata in graph.astream(\n", - " {\"messages\": [{\"role\": \"user\", \"content\": \"what's in the bedroom\"}]},\n", - " stream_mode=\"messages\",\n", - "):\n", - " if 
msg.content:\n", - " print(msg.content, end=\"|\", flush=True)\n", - "\n", - " if isinstance(msg, AIMessageChunk):\n", - " if first:\n", - " gathered = msg\n", - " first = False\n", - " else:\n", - " gathered = gathered + msg\n", - "\n", - " if msg.tool_call_chunks:\n", - " print(gathered.tool_calls)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index edde2d3a50..e668c85a99 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -7,24 +7,47 @@ "source": [ "# How to stream LLM tokens from your graph\n", "\n", - "In this example we will stream tokens from the language model powering an agent. We will use a ReAct agent as an example.\n", + "!!! info \"Prerequisites\"\n", "\n", - "This how-to guide closely follows the others in this directory, so we will call out differences with the **STREAMING** tag below (if you just want to search for those).\n", + " This guide assumes familiarity with the following:\n", + " \n", + " - [Streaming](../../concepts/streaming/)\n", + " - [Chat Models](https://python.langchain.com/docs/concepts/chat_models/)\n", "\n", - "
\n", - "

Note

\n", - "

\n", - " In this how-to, we will create our agent from scratch to be transparent (but verbose). You can accomplish similar functionality using the create_react_agent(model, tools=tool) (API doc) constructor. This may be more appropriate if you are used to LangChain’s AgentExecutor class.\n", - "

\n", - "
\n", + "When building LLM applications with LangGraph, you might want to stream individual LLM tokens from the LLM calls inside LangGraph nodes. You can do so via `graph.stream(..., stream_mode=\"messages\")`:\n", "\n", - "
\n", - "

Note on Python < 3.11

\n", - "

\n", - " When using python 3.8, 3.9, or 3.10, please ensure you manually pass the RunnableConfig through to the llm when invoking it like so: llm.ainvoke(..., config).\n", - " The stream method collects all events from your nested code using a streaming tracer passed as a callback. In 3.11 and above, this is automatically handled via contextvar's; prior to 3.11, asyncio's tasks lacked proper contextvar support, meaning that the callbacks will only propagate if you manually pass the config through. We do this in the call_model method below.\n", - "

\n", - "
" + "```python\n", + "from langgraph.graph import StateGraph, MessagesState\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "model = ChatOpenAI()\n", + "def call_model(state: MessagesState):\n", + " model.invoke(state[\"messages\"])\n", + " ...\n", + "\n", + "graph = (\n", + " StateGraph(MessagesState)\n", + " .add_node(call_model)\n", + " ...\n", + " .compile()\n", + " \n", + "for msg, metadata in graph.stream(inputs, stream_mode=\"messages\"):\n", + " print(msg)\n", + "```\n", + "\n", + "The streamed outputs will be tuples of `(message chunk, metadata)`:\n", + "\n", + "* message chunk is the token streamed by the LLM\n", + "* metadata is a dictionary with information about the graph node where the LLM was called as well as the LLM invocation metadata\n", + "\n", + "!!! note \"Using without LangChain\"\n", + "\n", + " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. Check out the [example below](#using-without-langchain) to learn more.\n", + "\n", + "!!! warning \"Note on Python < 3.11\"\n", + " \n", + " When using python 3.8, 3.9, or 3.10, please ensure you manually pass the `RunnableConfig` through to the chat model when invoking it like so: `model.ainvoke(..., config)`.\n", + " The stream method collects all events from your nested code using a streaming tracer passed as a callback. In 3.11 and above, this is automatically handled via [contextvars](https://docs.python.org/3/library/contextvars.html); prior to 3.11, [asyncio's tasks](https://docs.python.org/3/library/asyncio-task.html#asyncio.create_task) lacked proper `contextvar` support, meaning that the callbacks will only propagate if you manually pass the config through. We do this in the `call_model` function below." 
] }, { @@ -39,13 +62,13 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 1, "id": "af4ce0ba-7596-4e5f-8bf8-0b0bd6e62833", "metadata": {}, "outputs": [], "source": [ - "%%capture --no-stderr\n", - "%pip install --quiet -U langgraph langchain_openai langsmith" + "# %%capture --no-stderr\n", + "# %pip install --quiet -U langgraph langchain_openai" ] }, { @@ -58,10 +81,18 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 3, "id": "a372be6f", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdin", + "output_type": "stream", + "text": [ + "OPENAI_API_KEY: ········\n" + ] + } + ], "source": [ "import getpass\n", "import os\n", @@ -90,342 +121,390 @@ }, { "cell_type": "markdown", - "id": "cd420984", + "id": "8a592001", "metadata": {}, "source": [ - "## Set up the state\n", "\n", - "The main type of graph in `langgraph` is the [StateGraph](https://langchain-ai.github.io/langgraph/reference/graphs/#langgraph.graph.StateGraph).\n", - "This graph is parameterized by a `State` object that it passes around to each node.\n", - "Each node then returns operations the graph uses to `update` that state.\n", - "These operations can either SET specific attributes on the state (e.g. overwrite the existing values) or ADD to the existing attribute.\n", - "Whether to set or add is denoted by annotating the `State` object you use to construct the graph.\n", - "\n", - "For this example, the state we will track will just be a list of messages.\n", - "We want each node to just add messages to that list.\n", - "Therefore, we will use a `TypedDict` with one key (`messages`) and annotate it so that the `messages` attribute is \"append-only\"." 
+ "After we've done this, we should make sure the model knows that it has these tools available to call.\n", + "We can do this by converting the LangChain tools into the format for function calling, and then bind them to the model class.\n" ] }, { - "cell_type": "code", - "execution_count": 1, - "id": "17ef4967", + "cell_type": "markdown", + "id": "e03c5094-9297-4d19-a04e-3eedc75cefb4", "metadata": {}, - "outputs": [], "source": [ - "from typing import Annotated\n", - "\n", - "from typing_extensions import TypedDict\n", - "\n", - "from langgraph.graph.message import add_messages\n", + "!!! note Manual Callback Propagation\n", "\n", - "# Add messages essentially does this with more\n", - "# robust handling\n", - "# def add_messages(left: list, right: list):\n", - "# return left + right\n", - "\n", - "\n", - "class State(TypedDict):\n", - " messages: Annotated[list, add_messages]" + " Note that in `call_model(state: State, config: RunnableConfig):` below, we a) accept the [`RunnableConfig`](https://python.langchain.com/api_reference/core/runnables/langchain_core.runnables.config.RunnableConfig.html#langchain_core.runnables.config.RunnableConfig) in the node function and b) pass it in as the second arg for `model.ainvoke(..., config)`. This is optional for python >= 3.11." 
] }, { "cell_type": "markdown", - "id": "81ed4e9c", + "id": "ad2c85b6-28f8-4c7f-843a-c05cb7fd7187", "metadata": {}, "source": [ - "## Set up the tools\n", - "\n", - "We will first define the tools we want to use.\n", - "For this simple example, we will use create a placeholder search engine.\n", - "It is really easy to create your own tools - see documentation [here](https://python.langchain.com/docs/how_to/custom_tools) on how to do that.\n" + "## Example" ] }, { "cell_type": "code", - "execution_count": 2, - "id": "9a8bc61e", + "execution_count": 4, + "id": "7cc5905f-df82-4b31-84ad-2054f463aee8", "metadata": {}, "outputs": [], "source": [ - "from langchain_core.tools import tool\n", + "from typing import TypedDict\n", + "from langgraph.graph import START, StateGraph, MessagesState\n", + "from langchain_openai import ChatOpenAI\n", + "\n", "\n", + "# Note: we're adding the tags here to be able to filter the model outputs down the line\n", + "joke_model = ChatOpenAI(model=\"gpt-4o-mini\").with_config(tags=[\"joke\"])\n", + "poem_model = ChatOpenAI(model=\"gpt-4o-mini\").with_config(tags=[\"poem\"])\n", "\n", - "@tool\n", - "def search(query: str):\n", - " \"\"\"Call to surf the web.\"\"\"\n", - " # This is a placeholder, but don't tell the LLM that...\n", - " return [\"Cloudy with a chance of hail.\"]\n", + "\n", + "class State(TypedDict):\n", + " topic: str\n", + " joke: str\n", + " poem: str\n", "\n", "\n", - "tools = [search]" + "# highlight-next-line\n", + "async def call_model(state, config):\n", + " topic = state[\"topic\"]\n", + " print(\"Writing joke...\")\n", + " # Note: Passing the config through explicitly is required for python < 3.11\n", + " # Since context var support wasn't added before then: https://docs.python.org/3/library/asyncio-task.html#creating-tasks\n", + " joke_response = await joke_model.ainvoke(\n", + " [{\"role\": \"user\", \"content\": f\"Write a joke about {topic}\"}],\n", + " # highlight-next-line\n", + " config,\n", + " )\n", + " 
print(\"\\n\\nWriting poem...\")\n", + " poem_response = await poem_model.ainvoke(\n", + " [{\"role\": \"user\", \"content\": f\"Write a short poem about {topic}\"}],\n", + " # highlight-next-line\n", + " config,\n", + " )\n", + " return {\"joke\": joke_response.content, \"poem\": poem_response.content}\n", + "\n", + "\n", + "graph = StateGraph(State).add_node(call_model).add_edge(START, \"call_model\").compile()" ] }, { - "cell_type": "markdown", - "id": "b0aa12b9", + "cell_type": "code", + "execution_count": 5, + "id": "96050fba", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Writing joke...\n", + "Why| did| the| cat| sit| on| the| computer|?\n", + "\n", + "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!|\n", + "\n", + "Writing poem...\n", + "In| sun|lit| patches|,| they| softly| tread|,| \n", + "|Wh|isk|ers| twitch|ing|,| with| grace| they| spread|.| \n", + "|With| eyes| like| lantern|s|,| glowing| bright|,| \n", + "|They| dance| through| shadows|,| a| silent| flight|.| \n", + "\n", + "|P|aws| like| whispers| on| the| floor|,| \n", + "|Cur|led| up| tight|,| they| dream| and| sn|ore|.| \n", + "|Ch|asing| ph|ant|oms| in| the| night|,| \n", + "|F|eline| secrets|,| hidden| from| sight|.| \n", + "\n", + "|A| gentle| p|urr|,| a| playful| sw|at|,| \n", + "|In| every| corner|,| a| cozy| spot|.| \n", + "|Maj|estic| hunters|,| soft| as| a| sigh|,| \n", + "|In| the| hearts| of| many|,| forever| they| lie|.| |" + ] + } + ], "source": [ - "We can now wrap these tools in a simple [ToolNode](https://langchain-ai.github.io/langgraph/reference/prebuilt/#toolnode).\n", - "This is a simple class that takes in a list of messages containing an [AIMessages with tool_calls](https://api.python.langchain.com/en/latest/messages/langchain_core.messages.ai.AIMessage.html#langchain_core.messages.ai.AIMessage.tool_calls), runs the tools, and returns the output as 
[ToolMessage](https://api.python.langchain.com/en/latest/messages/langchain_core.messages.tool.ToolMessage.html#langchain_core.messages.tool.ToolMessage)s.\n" + "async for msg, metadata in graph.astream(\n", + " {\"topic\": \"cats\"},\n", + " # highlight-next-line\n", + " stream_mode=\"messages\",\n", + "):\n", + " if msg.content:\n", + " print(msg.content, end=\"|\", flush=True)" ] }, { "cell_type": "code", - "execution_count": 3, - "id": "4d6ac180", + "execution_count": 6, + "id": "bcdf561d-a5cd-4197-9c65-9ab8af85941f", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "{'langgraph_step': 1,\n", + " 'langgraph_node': 'call_model',\n", + " 'langgraph_triggers': ['start:call_model'],\n", + " 'langgraph_path': ('__pregel_pull', 'call_model'),\n", + " 'langgraph_checkpoint_ns': 'call_model:eeaca45a-85f2-c80f-e985-704a168a5d8c',\n", + " 'checkpoint_ns': 'call_model:eeaca45a-85f2-c80f-e985-704a168a5d8c',\n", + " 'ls_provider': 'openai',\n", + " 'ls_model_name': 'gpt-4o-mini',\n", + " 'ls_model_type': 'chat',\n", + " 'ls_temperature': 0.7}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "from langgraph.prebuilt import ToolNode\n", - "\n", - "tool_node = ToolNode(tools)" + "metadata" ] }, { "cell_type": "markdown", - "id": "4f13e0a5", + "id": "7db91f8d-3e17-47f4-b45e-c72bbbcbb5ed", "metadata": {}, "source": [ - "## Set up the model\n", - "\n", - "Now we need to load the chat model we want to use.\n", - "This should satisfy two criteria:\n", - "\n", - "1. It should work with messages, since our state is primarily a list of messages (chat history).\n", - "2. 
It should work with tool calling, since we are using a prebuilt [ToolNode](https://langchain-ai.github.io/langgraph/reference/prebuilt/#toolnode)\n", - "\n", - "**Note:** these model requirements are not requirements for using LangGraph - they are just requirements for this particular example.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "42c0af37", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_openai import ChatOpenAI\n", - "\n", - "model = ChatOpenAI(model=\"gpt-3.5-turbo\")" + "### Filter to specific LLM invocation" ] }, { "cell_type": "markdown", - "id": "8a592001", + "id": "a3a72acd-98cc-43f6-9dbb-0e97d03d211b", "metadata": {}, "source": [ - "\n", - "After we've done this, we should make sure the model knows that it has these tools available to call.\n", - "We can do this by converting the LangChain tools into the format for function calling, and then bind them to the model class.\n" + "You can see that we're streaming tokens from all of the LLM invocations. Let's now filter the streamed tokens to include only a specific LLM invocation. 
We will use `.astream_events()` method for this, and filter events using the tags we've added to the LLMs previously:" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "2bbdd3bc", + "execution_count": 7, + "id": "17354369-32d9-4413-8b4b-2ae9786948f7", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Writing joke...\n", + "Why| did| the| cat| sit| on| the| computer|?\n", + "\n", + "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!| 🐱|💻|\n", + "\n", + "Writing poem...\n" + ] + } + ], "source": [ - "model = model.bind_tools(tools)" + "# highlight-next-line\n", + "async for event in graph.astream_events(\n", + " {\"topic\": \"cats\"},\n", + " # highlight-next-line\n", + " version=\"v2\",\n", + "):\n", + " # filter on the custom tag\n", + " # highlight-next-line\n", + " if event[\"event\"] == \"on_chat_model_stream\" and \"joke\" in event.get(\"tags\", []):\n", + " data = event[\"data\"]\n", + " if data[\"chunk\"].content:\n", + " print(data[\"chunk\"].content, end=\"|\", flush=True)" ] }, { "cell_type": "markdown", - "id": "e03c5094-9297-4d19-a04e-3eedc75cefb4", + "id": "be8fd3d7-a227-41ad-bd08-7ef994ab291b", "metadata": {}, "source": [ - "## Define the nodes\n", - "\n", - "We now need to define a few different nodes in our graph.\n", - "In `langgraph`, a node can be either a function or a [runnable](https://python.langchain.com/docs/concepts/#langchain-expression-language-lcel).\n", - "There are two main nodes we need for this:\n", - "\n", - "1. The agent: responsible for deciding what (if any) actions to take.\n", - "2. 
A function to invoke tools: if the agent decides to take an action, this node will then execute that action.\n", - "\n", - "We will also need to define some edges.\n", - "Some of these edges may be conditional.\n", - "The reason they are conditional is that based on the output of a node, one of several paths may be taken.\n", - "The path that is taken is not known until that node is run (the LLM decides).\n", - "\n", - "1. Conditional Edge: after the agent is called, we should either:\n", - " a. If the agent said to take an action, then the function to invoke tools should be called\n", - " b. If the agent said that it was finished, then it should finish\n", - "2. Normal Edge: after the tools are invoked, it should always go back to the agent to decide what to do next\n", - "\n", - "Let's define the nodes, as well as a function to decide how what conditional edge to take.\n", - "\n", - "**STREAMING**\n", - "\n", - "We define each node as an async function.\n", - "\n", - "
\n", - "

Manual Callback Propagation

\n", - "

\n", - " Note that in call_model(state: State, config: RunnableConfig): below, we a) accept the RunnableConfig in the node and b) pass this in as the second arg for llm.ainvoke(..., config). This is optional for python 3.11 and later.

\n", - "
" + "## Example without LangChain" ] }, { "cell_type": "code", - "execution_count": 6, - "id": "3b541bb9-900c-40d0-964d-7b5dfee30667", + "execution_count": 8, + "id": "699b3bab-9da7-4f2a-8006-93289350d89d", "metadata": {}, "outputs": [], "source": [ - "from typing import Literal\n", + "from openai import AsyncOpenAI\n", "\n", - "from langchain_core.runnables import RunnableConfig\n", + "openai_client = AsyncOpenAI()\n", + "model_name = \"gpt-4o-mini\"\n", "\n", - "from langgraph.graph import END, START, StateGraph\n", "\n", + "async def stream_tokens(model_name: str, messages: list[dict]):\n", + " response = await openai_client.chat.completions.create(\n", + " messages=messages, model=model_name, stream=True\n", + " )\n", + " async for chunk in response:\n", + " delta = chunk.choices[0].delta\n", "\n", - "# Define the function that determines whether to continue or not\n", - "def should_continue(state: State):\n", - " messages = state[\"messages\"]\n", - " last_message = messages[-1]\n", - " # If there is no function call, then we finish\n", - " if not last_message.tool_calls:\n", - " return END\n", - " # Otherwise if there is, we continue\n", - " else:\n", - " return \"tools\"\n", + " if delta.content:\n", + " yield {\"role\": delta.role, \"content\": delta.content}\n", "\n", "\n", - "# Define the function that calls the model\n", - "async def call_model(state: State, config: RunnableConfig):\n", - " messages = state[\"messages\"]\n", - " # Note: Passing the config through explicitly is required for python < 3.11\n", - " # Since context var support wasn't added before then: https://docs.python.org/3/library/asyncio-task.html#creating-tasks\n", - " response = await model.ainvoke(messages, config)\n", - " # We return a list, because this will get added to the existing list\n", - " return {\"messages\": response}" + "# highlight-next-line\n", + "async def call_model(state, config, writer):\n", + " topic = state[\"topic\"]\n", + " joke = \"\"\n", + " poem = \"\"\n", + 
"\n", + " print(\"Writing joke...\")\n", + " async for msg_chunk in stream_tokens(\n", + " model_name, [{\"role\": \"user\", \"content\": f\"Write a joke about {topic}\"}]\n", + " ):\n", + " joke += msg_chunk[\"content\"]\n", + " metadata = {**config[\"metadata\"], \"tags\": [\"joke\"]}\n", + " chunk_to_stream = (msg_chunk, metadata)\n", + " # highlight-next-line\n", + " writer(chunk_to_stream)\n", + "\n", + " print(\"\\n\\nWriting poem...\")\n", + " async for msg_chunk in stream_tokens(\n", + " model_name, [{\"role\": \"user\", \"content\": f\"Write a short poem about {topic}\"}]\n", + " ):\n", + " poem += msg_chunk[\"content\"]\n", + " metadata = {**config[\"metadata\"], \"tags\": [\"poem\"]}\n", + " chunk_to_stream = (msg_chunk, metadata)\n", + " # highlight-next-line\n", + " writer(chunk_to_stream)\n", + "\n", + " return {\"joke\": joke, \"poem\": poem}\n", + "\n", + "\n", + "graph = StateGraph(State).add_node(call_model).add_edge(START, \"call_model\").compile()" ] }, { "cell_type": "markdown", - "id": "ffd6e892-946c-4899-8cc0-7c9291c1f73b", + "id": "8af13d73-a0ea-44c0-a92e-28676cd164dd", "metadata": {}, "source": [ - "## Define the graph\n", + "!!! note \"stream_mode=\"custom\"\"\n", "\n", - "We can now put it all together and define the graph!" + " When streaming LLM tokens without LangChain, we recommend using [`stream_mode=\"custom\"`](../streaming/streaming/#stream-modecustom). This allows you to explicitly control which data from the LLM provider APIs to include in LangGraph streamed outputs, including any additional metadata." 
] }, { "cell_type": "code", - "execution_count": 7, - "id": "813ae66c-3b58-4283-a02a-36da72a2ab90", + "execution_count": 9, + "id": "e977406d-7be6-4c9f-9185-5e5551f848f3", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Writing joke...\n", + "Why| did| the| cat| sit| on| the| computer|?\n", + "\n", + "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!|\n", + "\n", + "Writing poem...\n", + "In| shadows| soft|,| on| silent| paws|,| \n", + "|A| whisk|ered| muse| with| gentle| claws|,| \n", + "|They| weave| through| dreams| in| moon|lit| grace|,| \n", + "|A| dance| of| warmth| in| a| sun|lit| place|.| \n", + "\n", + "|With| eyes| like| stars|,| they| peer| so| wise|,| \n", + "|The| world| reflected| in| their| guise|.| \n", + "|From| playful| leaps| to| cozy| curls|,| \n", + "|In| each| sweet| p|urr|,| a| magic| sw|irls|.| \n", + "\n", + "|Oh|,| feline| friends|,| with| hearts| so| bold|,| \n", + "|In| every| tale|,| your| love| unfolds|.| \n", + "|A| quiet| comfort|,| a| steadfast| glance|,| \n", + "|In| the| company| of| cats|,| we| find| our| trance|.|" + ] + } + ], "source": [ - "# Define a new graph\n", - "workflow = StateGraph(State)\n", - "\n", - "# Define the two nodes we will cycle between\n", - "workflow.add_node(\"agent\", call_model)\n", - "workflow.add_node(\"tools\", tool_node)\n", - "\n", - "# Set the entrypoint as `agent`\n", - "# This means that this node is the first one called\n", - "workflow.add_edge(START, \"agent\")\n", - "\n", - "# We now add a conditional edge\n", - "workflow.add_conditional_edges(\n", - " # First, we define the start node. 
We use `agent`.\n", - " # This means these are the edges taken after the `agent` node is called.\n", - " \"agent\",\n", - " # Next, we pass in the function that will determine which node is called next.\n", - " should_continue,\n", - " # Next we pass in the path map - all the nodes this edge could go to\n", - " [\"tools\", END],\n", - ")\n", - "\n", - "workflow.add_edge(\"tools\", \"agent\")\n", - "\n", - "# Finally, we compile it!\n", - "# This compiles it into a LangChain Runnable,\n", - "# meaning you can use it as you would any other runnable\n", - "app = workflow.compile()" + "async for msg, metadata in graph.astream(\n", + " {\"topic\": \"cats\"},\n", + " # highlight-next-line\n", + " stream_mode=\"custom\",\n", + "):\n", + " print(msg[\"content\"], end=\"|\", flush=True)" ] }, { "cell_type": "code", - "execution_count": 8, - "id": "72785b66", + "execution_count": 10, + "id": "0bdc1635-f424-4a5f-95db-e993bb16adb2", "metadata": {}, "outputs": [ { "data": { - "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAD5ANYDASIAAhEBAxEB/8QAHQABAAMBAQEBAQEAAAAAAAAAAAUGBwQIAwIBCf/EAFIQAAEEAQIDAgYMCgcECwAAAAEAAgMEBQYRBxIhEzEIFCJBUZQVFhcyNlVWYZPR0tMjMzVCVHF1gZW0JlJzdKGyswmRsfA
YREVTYmSChKLBw//EABsBAQACAwEBAAAAAAAAAAAAAAABAgMEBQYH/8QANREBAAECAgcFBgYDAQAAAAAAAAECEQMhBBIxUXGR0RRBUmGhBRMVM4GxIiNiksHwMkLh8f/aAAwDAQACEQMRAD8A/wBU0REBERAREQEREBEXxu3IMdUmtWZGw14Wl75HdzQO8qYi+UD7Ljt5nH0H8lq9WrP/AKs0zWn/ABKgmYu7q9osZOW3jsY8bxYqJ5hke3+tO9p5tz/3bSAB0dzdw7aeh9OUGclfA42Fvn5KkYJ8/U7dTv13Kz6mHTlXOfl1/vFOXe6PbVhfjih6yz609tWF+OKHrLPrT2q4X4noerM+pParhfieh6sz6k/J8/ROR7asL8cUPWWfWntqwvxxQ9ZZ9ae1XC/E9D1Zn1J7VcL8T0PVmfUn5Pn6GR7asL8cUPWWfWntqwvxxQ9ZZ9ae1XC/E9D1Zn1J7VcL8T0PVmfUn5Pn6GT+t1RhnEBuXoknzCyz61IRSsmjD43tkY7uc07g/vUadKYRwIOHoEHoQarPqXBJoHFwSGfEsdgLm4PbYzaJrtumz49uR426eU0/MQQClsGdkzH9/u8yWRFDYTMWJbMuMycbIsrAwSF0LSIbMZOwli3JIG/RzCSWEgEkFjnTKw1UzRNpVERFUEREBERAREQEREBERAREQFWNUbZPUOn8K4B0Er5MhOw7+WyDk5R9LLC7/wBO3nVnVZzDfFNeacuOB7Oatcx+4G4539lM3c+bpXk/52Wxgf538p+0pjasyIi10Co7+NmjGa+OixmDJqNsjYH1oak8kccjmc7Y3zNYYmPLevK5wO3mV4XmzOezGm/CDil0Dg9W058tmqzdTw28cTgL1XsQJLsc56RzMaGtHK4F7mbFh7yFw4WeEzp7iPBrCaavdw8Wnbd4Sy2cfbZEalZ4aZnSPha1ryPKMO/O0d46Eqx6S496F1xVzM+Hzhm9h63jl6GxSsVpooNnHteylja9zNmu2c1pB26LHcVkdc6G0xxo01p/TWai1nPmcvnMJkDjnSUJ453iSMxzn8G6XZxAjcd+ZuxGyrOA05kZ+IueymPwvEO7jchw+yGKGT1dBZfNPeEjJOzEcnlRbtJ5QGsY53MGAoNS1t4X+jMHoVmpNPuu6lrSW6FdkkGMutgc2zKG84l7AtcWtEhLB152CM7PcAtl09n6eqcLUytDxjxO03nj8aqy1pdtyPKila17T07nNBXn/VWi82fAy0Vi6GBuT5jD0NPXJ8NFAW2j4tLVlnjEZ2PaARv8k9SRt3redJami1hgK2VhoZLGRzl21XL031LLOVxb5UTwHN323G46ggoJhERBV9e7Y+jSzbNmz4u1HIXemF7xHM35xyOJ2PTma0+YEWhVjiOO30pPRbuZchLDSYANyTJI1pP6gC5x+YFWdbFWeFTM755ZfzMp7hERa6BERAREQEREBERAREQEREBR2fwseexrqzpDBI17JoZ2jd0UrHBzHj9RA3HnG4PQqRRWpqmmYqjbAg8NqQWbIxmSEdHOMbu+rzeTMB3yQk+/Z/i3fZ2xVVl8HDhVPK+WThzpeSR7i5z3YmAlxPeSeVXjL4Shnqvi2QqRW4QeZokbuWu8zmnvaR6RsVCjQjYQW1c9nake2wYLxmDf1GUPP+KzWwq876s846/3anKVfd4NnChziXcN9LOcepJxMBJ/+Kv+Ox1XD4+rQo14qdKrE2CCvAwMjijaA1rGtHQAAAAD0KA9pNj5VZ76aH7pPaTY+VWe+mh+6T3eH4/SS0b1oRVf2k2PlVnvpofulU+HWPyuqaOblvapzAfTzV+hF2MsIHZQzuYzf8GfK5QN/n8wT3eH4/SS0b2qKnaq4OaE11lBktRaPwecyAjEXjWQoRTScg32bzOaTsNz0+ddPtJsfKrPfTQ/dJ7SbHyqz300P3Se7w/H6SWjer3/AEauE2+/ubaW/hEH2VZdP6V0pwtwlmHDYvFaWxJlNiZ
lSGOtCZCGt53AADchrRv8wC+Y0RP59U55w9HbxD/hHuumhofF0rkVyVs+SuxEOjsZGw+w6M7bbsDyWsO2/VoHefSU1cKNtV+Edf8Apk+WPgl1Jlq2YswPr0agccdXmY5kpe4Frp3tO3KS0lrWkbhrnl3V3K2yIix1168+UEiIixoEREBERAREQEREBERAREQEREBERAREQFnvBcg4rVHKSf6T5bv9PjT/AJz/AM+haEs94L7+xWqN9vhPlvegfpT/AEINCREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQFnnBUbYnVPlB39KMt3D/AM09aGs84K7exOqduv8ASjLd42/609BoaIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIuXKZOthcdYvW5OyrV2GR7g0uOw8wA6knuAHUkgDqqq/UeqbB7SviMZWid1bHbuvMoHm5uSMtB9IBcPnKz4eDXiReNnnkmy6IqR7O6w/QMH63N92ns7rD9Awfrc33ay9lr3xzgs+vGLXtzhfwz1Bquhg5NST4muLRxsU/YuljDm9oQ/ldtysLn9x35dum+68x+Bl4XVvjNrjL6WoaFkpVJrd7OXMo7JB7ajJZS9jOQQt5yXua3vHnd5tj6TsZXVduvLBPjMDNDK0skjkszFrmkbEEGLqCFlHg+cB7fg50tQwYCnibL8zfdakmsWpeeOIb9lANo+rWAu6+cklOy1745wWekkVI9ndYfoGD9bm+7T2d1h+gYP1ub7tOy1745wWXdFSfZ3WH6Bg/W5vu1K6f1PPeuux2UqR0MkIzMxsMplhmjBAc5jy1p3BIBaQCNx3g7qlWj10xrZTwmCywoiLWQIiICIiAiIgIiICIiAiIgIiICIiCp8UTto+T57tEHf0G3CCulcvFL4Hv8A79R/m4V1Lp4fyI4z9qVu4RERUREQERcOKzmPzgtnH3YLoqWH1JzXkDxFMw7Pjdt3OaehHeD0KDuUPMdtf6a288Vsfu5WfUFMKGn+H+mf7K3/AJGK9Hfwq+0phfERFyUCIiAiIgIiICIiAiIgIiICIiAiIgqXFL4Hv/v1H+bhXUuXil8D3/36j/NwrqXTw/kRxn7UrdyqcV9UUNF8NdS5rKWb1OjUoyuknxm3jTN28oMO/QSbkcpPTfbfovNmIzXE/Qme1rgqbcuL1jQ9jO4rGZnOnN2Yrccoja4PdG3kcQ/8UC9hcwbHrsvV+ewWP1PhbuIytSK/jbsLoLFaZu7JGOGxaR+pZ3J4Oei8Xjcm/CYOFuYs4u1i22slct2BLDMwNMM7jLzyReS3yS7ydjycpO6pVEzOSrAaGusnpupqPWuidRap1fhMXoOe3LY1BYszV4cq6SMtIbJs0uDGPe5gBawN6cvMd73w60nxRxuVwmeflXy4CxSmmyklzV02WF1j4C6KWCJ1SNsLhJyO/BuDeUkbdyleEPAfVGk9XeP5mxjsfgfEJqVnB4/M5LKQZBz+UB8jbriIw0NcA1gJPOQSQtC0bwG0LoDIy3cDhDRnfBJVaDcsSsiieQXxxMfI5sTSWjowNHQKsRIwzhvez2C0pwB1bLq3UOXv6otQY3LQZPIvnrTxS0p5Gnsj5LXsdCwh4Ac7rzFxJJuXgqaLqYSbiDfhyGYsTR6sy9DsbmVsWIeRtgEPMb3lplOw3kI5zudydytUq8KtLUsLpXEw4vkx+l547GIh8YlPi0jI3xsO5du/Zkjxs8uHXfvAX9xHCzTGA1nktVY7Gup5vI7m3LFZmEUziGgvMPP2fOeVu7+XmO3epimYFrUNP8P9M/2Vv/IxTKhp/h/pn+yt/wCRiz0d/Cr7SmF8REXJQIiICIiAiIgIiICIiAiIgIiICIiCpcUvge/+/Uf5uFdS+OrrEOepT4Sm2S9adZhr2DTkYTj3Eds2WUFw2ADWu5Ru48zOmzuYRjrmo6YEc2l7F2VvQzULVfsn/OBJIxwB9BHRdLBmKsKKLxeJmc5iNsR
v4LbYsmUUJ7LZ75GZX1ql9+nstnvkZlfWqX36y6n6o/dHUsm0UJ7LZ75GZX1ql9+uPGasy2XjnfW0bmS2CeSs/tZKsZ52OLXbB0w3G46OHQ94JCan6o/dHUss6KE9ls98jMr61S+/T2Wz3yMyvrVL79NT9UfujqWTahp/h/pn+yt/5GL+DLZ7f4G5Qf8AuqX36xDDeGlwui4k5GlqzLT6SymElmxrKeQqykCQuaJHve1paCCzlA7gOY8x5gGpthxMzMbJjKYnbFu6S1nqxFz47I1Mvj6t+hahu0bUTZ4LNeQSRTRuAc17HDcOaQQQR0IK6FyFRERAREQEREBERAREQEREBEXBmss3C46e0a9i7JGzmZUps55pjuAGsbuB1LmjckNG+7i0AkB3Eho3JA83VVdlu7raqDUdLjtO3qDi260yV8gZHP2HIxzB2TeQE8x8rd7dg0t3Pc3ASZC+LWZfDb8VuOsY6GJjmNrt7PkaX+URI/q93MQAOcADdvMZtB8KtKCk17a8LIQ9xe/kaBzOPe4+k9B1PVfdEQEREBV/Rs4nrZT8PkrHJk7TCclHyObtIfJj6DeIdzD527KwKuaUnHslqeqbWRsvrZPr4+zlYwPghkDYHfnRDn238zucfmoLGiIgLyZxZ8A7HcU/CRrcQZMjWx+CdXZYvUBXbO+1fiIEfNG9hjMLgGmQO3LuUt2/CFzfWaIKhpzWVxmUi09qitDjtQOa50E1YuNPJNbuS+BzurX7DmdC4lzOuxkYO0db1Gaj01jNW4mXGZenHepSFrix+4LHtIcyRjhs5j2uAc17SHNcAWkEAqs0szktB3IMbqOyb+EnkENLUMpAfG9xPLBbAAa0nyWsmHR7iGODXlnaheUREBERAREQEREBERAREQcGZzEODptnmjmmL5Y4I4q8TpZHve4NaA1oPTc7lx8lrQ5ziGtJHPi8EYbYyeRNe5muydX8bhhMbWQl5cI2NLnFo97zHfyywE9A0N+FSCbIatt3J4L9WLHRmnW57AFa0JBHJJKImnqWlrWBz+o2kDQA5xdPICIiAiIgIiICrxkfjdchrpMpYhytMBjBHz0qskDiSeYdWSStmHQ+SRX6bH31hXDmsRHm6Pi0k9mttJHMyapMYpGPY8PaQ4ebdoBad2uaS1wLSQQ7kUVgszJkGOrXoq9HNQsbJZx8Vps5ia572skBABLHmN5a5zWk8p3AIIEqgIiIC+NupBfqzVbUMdmtMx0csMrQ5kjCNi1wPQggkEFfZEFE0DdsafzmS0Penks+x0MdvFWp5C+Weg8lrWPc4lz3wvaYy47lzTE5xLnOKvaz7WDzS4vcPLLH8rrMWSxz27nymOijn7u7o6s3v+fbvK0FAREQEREBERAREQERR2odSYnSOHsZbO5SlhcVX5e2vZGwyCCLmcGt5nvIaN3OaBuepIHnQR+i6XidfLOOMnxb58palcyex2xm/CECYHfyWvaGuDfzQQFYVlHCHi9w/wBTWL+JwWqsJZyljK35YsdDnoLliwO2e8zRta8u5HAF4aB5Lf1LV0BERAREQEREBERBwZXFm+1kkE/iV+LbsrbI2vc1vM1zmHmHVj+QBwGxI7i1wa4fjF5nx+SWvYrvoXo3yDxWZ7S58bXlrZW8pILHDZw845gHBrgQJJYB4Y1bipa0DBFwmwkdrOStmhtZmG0yG/j6zg3nZW5i080mwBcHbt7MEN5uV8Yblhs1j9RYyDI4q/WyePsAuht05mzRSAEglr2kg9QR0PmXavFf+zx4lTaV4V5TRet3u09Ywl1zqJyp7FskMpJcxhdsDyyNfvt/XC9V+6lo75U4j12P61sdnxvBPKU2nctKKre6lo75U4j12P6091LR3ypxHrsf1p2fG8E8pTqzuQ+qmm9xn0DVazmFWjlci52x8nlFeAfNufGTtv37HbuK0FY9jOIWmbvGfO5SfPY2KjRw1ShTnktRhsskks0tjkO/UANrA7HvB9CvHupaO+V
OI9dj+tOz43gnlJqzuWlFVvdS0d8qcR67H9amMNqLFaiikkxeSqZFkZAe6rO2QNJ6gHlJ2/eq1YOJRF6qZiOCLTCRREWFAiLKuKPEexWuS4DCzGCeMDx28z30W4BEUfoeQQS780EbeU7dm1o2jYmlYkYeH/4L1nNa4HTUgiyeXp0piOYQySjtCPSGe+2+fZQp4zaNB/LTP3QS/ZWFQ1o4C4sb5byXPe4lz3knclzj1J385X0XqqPYeBEfjrmZ8rR/El4bj7s2jfjpvq8v2FWuJWqeHXFHQWd0nl8u12Py1V9aQitKTGT1bIPJ981wa4fO0LM0V/gejeKrnHQvDJPAG4X4XgfktV6k1jdhgz0kzsXjmiKR4FZrt3zNIaekhDdu4gMPTyl7M92bRvx031eX7Cw5E+B6N4qucdC8Nx92bRvx031eX7C+9bi3o+08NGoKkRPcbBMI/wB7wAsHQgOBBG4PeCk+w9H7qqvToXh6jilZPEySN7ZI3gOa9h3Dge4gr9rzXpLUt3Qtvtsbu+k5xdPjOflil37y0dzH+fcbb/nb949D4XMVNQYqtkaMomq2Gc7Hf8QR5iDuCPMQV5zTtAr0KqLzemdk9Tg7URFywREQFUeIj+3iwWMkO9TJ5HxezHt0ljbBNMWO/wDC4wgEdxBLSCCQrcqdxA/Kui/2w/8AkLa2tG+bH19IlMbXaxjY2hrGhrQNgANgF/URbKBERAREQFBZ8txuUwmTgHZW/H4ajpG9DJFK8Mcx3pHUO677FoI6qdUBq/8AF4X9sUv9dqy4WdcRvWp2tCREXHVfC7abRpz2X+8hjdI79QG//wBLyxj55blVtud3PZtk2Zn7bcz3nmcf95Xqi7Vbepz1n+8mjdG79RGxXlehXlpVm07DeSzUJrTM335XsPK7/EL1vsHVtib8v5J2OhFC6i1fjtLGuL7b7u35uTxLHWLfdtvzdjG/l7x37b9du4qH91vT/KT2Wc2B2+DuQ+4Xp5xKKZtNUX4qOriPxAocNtODKXg15lnjq14nzMhbJM8+SHSPIaxuwJLidgGk9e5Z/F4SdMYfUM8uOp2chh60NwwYjLxXoJ4nyiM8szAOV7SerXNHe3rsdxNawbV4wYytVwVi5QzOHuQ5anLlcRagrmWMkBr+1jZzNcHuaQ07jffbovzqXRWr9bcPdQYbJx6eoZC6IWVRj3zGJobI1zzJI5gJ35egDOnz960sSvGqqmcKcrZWiJvNp7+KXZFxZlxOUy1LVeGGn3UcU7NNkiti02Ss13K/fZreWRp5RyjmB5hs4qtw671Rn+JPDrx7B2NNYnIeOysi9kRIbLPFi5gniaAGuHRwBLtj5wQrFrrhXLrnVF+xPZigxVzTVnCPLSTMyWSaN7Xhu2xaAw+fffbp51EVtLa2iz+k81qibCPoaYitOlfihYlsWQ6uYw8Rdn77oCWN37ztv0CrX77W1ZmbRMbs84vf6cBrqKmji3p8n8VnP36dyA//AAX6h4r4CxNHEyLN873Bo5tP5Bo3J85MGw/WVu++w/FHNC4LTuA2TeY9QYkkmOtNFbjB7miYOBaPm54nu/W8rMVp/AbGPEefy5BEdmaOpET3PbCHEuHzc8r2/rYVzfa2r2OvW8rcb9Lr097V0RF8/BERAVO4gflXRf7Yf/IW1cVTuIH5V0X+2H/yFtbWi/M+k/aUw7llHhPay1LoLhBfy+lRE3JsuU4XTSTCMxRyWY2OLd43hxdzBncNg8uB3aAdXVI416AtcT+GWa03RtxUb9oQy1rE7S6NssUzJmc4HXlLowDt12JWedmSEBnOLupMVksBpmto2tkteZKpPkLGKgzHLTpVY5AztX2nQgnmLmAARb7kjoBuYuPwkjk8Rg6+I0tYua2ymTuYj2uWLbIRVsVNzaMtjZzRGwcpDmtcXdozZu52H9yWheJE2qMHrym3S0Wsa+PsYe/jJLNk0LFV8rJY3Mm7LtGva9m/WMghxHTvUJj/AAfNU6Z
hwOpsXlsTd4gU8zkszdbcbLFjrRvtDZ4GlodJG1oZDyO2cfwfVvlbCv4h8dUcYsnqd+kKZrXNJagxuv6OGzeLhu87XMfBJKG9ozlEsMjHMcNwN9ureiu+l+MmT1hxPz2msbpeN2JwV44+/kp8rHHZieIRIJBU5OYxOLg1r+bqTvtsDtSr3g/6wvY27qKTKYV/EK1qmpqd0REwxoFaHsIqvPt2nKIt95OXcuPvVOai4T6u1XxkwGprEemMTRwuQ8YjzGN7cZa1U7NzTTmBaGFjnO3J5yNgNmg7qMxtigNX/i8L+2KX+u1T6gNX/i8L+2KX+u1bWD8yFqdsNCREXHVFlnFHhvZuXJM9hIe3sPA8dpN6Om2AAlj9LwAAWn3wA22Ldn6mi2tG0nE0XEjEw9v3HlWKzHM57GuIkYdnxPBa9h7iHNPVp+YhfRejc5o3BaleH5TEU70oGwlmhaXgegO7x/vUIeDejT/2HF+6WT7S9VR7cwZj8dExPlaehaGGoty9xvRvxHF9LJ9pPcb0b8RxfSyfaV/jmjeGrlHUtDDUW5e43o34ji+lk+0nuN6N+I4vpZPtJ8c0bw1co6loYajnBrSSQAOpJ8y3L3G9G/EcX0sn2l963CbR9V7Xt09SlLeo8Yj7YDz9z9won25o/dTV6dS0Ma0jpi9rq0I8dzR0ASJsnybxR7d4YT0e/wA2w3A/O8wPoXDYipgMXWx9GIQ1a7AyNg9HpJ85J6k+ckldccbIY2sY0MY0BrWtGwAHcAF+l53TtPr02qLxamNkdU8BERctAiIgKncQPyrov9sP/kLauKqPERgghweUkBFTF5DxmzJ5oo3QTQl7unvWmUEnuABJIAK2tG+bH19YlMbXWi/McjJmNexzXscNw5p3BX6WygREQEREBQGr/wAXhf2xS/12qfUFneXJ5XC4yA9rbF6G2+NnUxxROD3Pd6B0AG+25cAFlwsq4nctTtaAiIuOqIiICIiAiIgIiICIiAiIgIiICIiAiIgrU/DTSNqQyTaXw8rz3udQiJ79/wCr6SV8/cr0Z8k8J/D4vsq0otjtGNH+885Ted6re5Xoz5J4T+HxfZT3K9GfJPCfw+L7KtKJ2jG8c85LzvVb3K9GfJPCfw+L7Ke5Xoz5J4T+HxfZVpRO0Y3jnnJed6re5Xoz5J4T+HxfZUzh9P4vT0L4sXjamNieQXMqQNiDiOgJDQN1IIq1Y2JXFqqpmOJeRERYUP/Z", "text/plain": [ - "" + "{'langgraph_step': 1,\n", + " 'langgraph_node': 'call_model',\n", + " 'langgraph_triggers': ['start:call_model'],\n", + " 'langgraph_path': ('__pregel_pull', 'call_model'),\n", + " 'langgraph_checkpoint_ns': 'call_model:ca83e792-dddc-7f99-c8ff-4e8c166106f6',\n", + " 'tags': ['poem']}" ] }, + "execution_count": 10, "metadata": {}, - "output_type": "display_data" + "output_type": "execute_result" } ], "source": [ - "from IPython.display import Image, display\n", - "\n", - "display(Image(app.get_graph().draw_mermaid_png()))" + "metadata" ] }, { "cell_type": "markdown", - "id": "2a1b56c5-bd61-4192-8bdb-458a1e9f0159", + "id": "a3afbbee-fab8-4c7f-ad26-094f8c8f4dd9", "metadata": {}, "source": [ - "## Streaming LLM Tokens\n", - "\n", 
- "You can access the LLM tokens as they are produced by each node. \n", - "In this case only the \"agent\" node produces LLM tokens.\n", - "In order for this to work properly, you must be using an LLM that supports streaming as well as have set it when constructing the LLM (e.g. `ChatOpenAI(model=\"gpt-3.5-turbo-1106\", streaming=True)`)\n" + "To filter to the specific LLM invocation, you can use the streamed metadata:" ] }, { "cell_type": "code", - "execution_count": 9, - "id": "96050fba", + "execution_count": 11, + "id": "fdeee9d9-2625-403a-9253-418a0feeed77", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[{'name': 'search', 'args': {}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {'query': ''}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {'query': 'weather'}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {'query': 'weather in'}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {'query': 'weather in San'}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {'query': 'weather in San Francisco'}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[{'name': 'search', 'args': {'query': 'weather in San Francisco'}, 'id': 'call_lfwgOci165GXplBjSDBeD4sE', 'type': 'tool_call'}]\n", - "[\"Cloudy with a chance of hail.\"]|The| weather| in| San| Francisco| is| currently| cloudy| with| a| chance| of| hail|.|" + "Writing joke...\n", + "\n", + "\n", + "Writing poem...\n", + "In| shadows| sleek|,| with| eyes| ag|low|,| \n", + "|A| whisper| of| grace|,| as| they| softly| flow|,| \n", 
+ "|With| p|itter|-p|atter| on| the| midnight| floor|,| \n", + "|Cur|iosity| blooms|,| they| explore| more| and| more|.| \n", + "\n", + "|A| stretch| and| a| y|awn|,| in| sun|beam|'s| embrace|,| \n", + "|Ch|asing| the| dust| mot|es| that| dance| with| such| grace|,| \n", + "|Each| p|ounce| a| ballet|,| each| leap| a| delight|,| \n", + "|The| world| is| their| playground| from| morning| to| night|.| \n", + "\n", + "|F|urred| confid|ants|,| both| sly| and| serene|,| \n", + "|With| silent| mis|chief|,| they| dwell| in| between|,| \n", + "|In| the| heart| of| our| homes|,| they| fro|lic| and| play|,| \n", + "|Oh|,| marvelous| creatures|,| in| every| way|.|" ] } ], "source": [ - "from langchain_core.messages import AIMessageChunk, HumanMessage\n", - "\n", - "inputs = [HumanMessage(content=\"what is the weather in sf\")]\n", - "first = True\n", - "async for msg, metadata in app.astream({\"messages\": inputs}, stream_mode=\"messages\"):\n", - " if msg.content and not isinstance(msg, HumanMessage):\n", - " print(msg.content, end=\"|\", flush=True)\n", - "\n", - " if isinstance(msg, AIMessageChunk):\n", - " if first:\n", - " gathered = msg\n", - " first = False\n", - " else:\n", - " gathered = gathered + msg\n", - "\n", - " if msg.tool_call_chunks:\n", - " print(gathered.tool_calls)" + "async for msg, metadata in graph.astream(\n", + " {\"topic\": \"cats\"},\n", + " # highlight-next-line\n", + " stream_mode=\"custom\",\n", + "):\n", + " if \"poem\" in metadata.get(\"tags\", []):\n", + " print(msg[\"content\"], end=\"|\", flush=True)" ] } ], @@ -445,7 +524,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.6" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 4bcb4199ab..584f15b447 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -145,11 +145,7 @@ nav: - Streaming: - Streaming: how-tos#streaming - how-tos/streaming/streaming.ipynb - - how-tos/stream-updates.ipynb - 
how-tos/streaming-tokens.ipynb - - how-tos/streaming-tokens-without-langchain.ipynb - - how-tos/streaming-content.ipynb - - how-tos/stream-multiple.ipynb - how-tos/streaming-events-from-within-tools.ipynb - how-tos/streaming-events-from-within-tools-without-langchain.ipynb - how-tos/streaming-from-final-node.ipynb From 968d6c37630417a84cfc8c7f48c6ae3424388753 Mon Sep 17 00:00:00 2001 From: vbarda Date: Wed, 29 Jan 2025 11:51:09 -0500 Subject: [PATCH 03/14] update --- .../streaming-tokens_72785b66.msgpack.zlib | 1 - .../streaming-tokens_96050fba.msgpack.zlib | 2 +- ...4-6020-445e-8ecd-ca4239e9b22b.msgpack.zlib | 1 + ...d-7be6-4c9f-9185-5e5551f848f3.msgpack.zlib | 1 + ...9-2625-403a-9253-418a0feeed77.msgpack.zlib | 1 + docs/docs/how-tos/streaming-tokens.ipynb | 142 +++++++++--------- docs/docs/how-tos/streaming/streaming.ipynb | 6 +- docs/mkdocs.yml | 9 +- 8 files changed, 81 insertions(+), 82 deletions(-) delete mode 100644 docs/cassettes/streaming-tokens_72785b66.msgpack.zlib create mode 100644 docs/cassettes/streaming-tokens_c9e0df34-6020-445e-8ecd-ca4239e9b22b.msgpack.zlib create mode 100644 docs/cassettes/streaming-tokens_e977406d-7be6-4c9f-9185-5e5551f848f3.msgpack.zlib create mode 100644 docs/cassettes/streaming-tokens_fdeee9d9-2625-403a-9253-418a0feeed77.msgpack.zlib diff --git a/docs/cassettes/streaming-tokens_72785b66.msgpack.zlib b/docs/cassettes/streaming-tokens_72785b66.msgpack.zlib deleted file mode 100644 index 9d409b09b3..0000000000 --- a/docs/cassettes/streaming-tokens_72785b66.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNqVeAdYU8vW9kaaiJRYQKRKVaT3jvQqvYYmKYRASEISOtgpipQAAkqTIkoX6R2kV+lKEREQFCkC0tsf9Zzr+e537/883+y9Z949q8yatdfMmuTuC284Do/EoMnykWgCHOcMJZBe8MS7L3BwTy84nnA/2wNOcMXAMnW0LDK9cMhxsg1XAgGLVxAV9YDjPJyRMBEk2l0U6YEQ1Te3knW2lsbCNDEIuB/MA6IjL2kr4eoKRZuijZHqsvoSRuJQNBil749B6Ev4YiEeVq5QJEzexUYMpW+KRUM9tH2cNdQtzTTxBhpW8i5QSTOSrJmLjZ8rxERX7aqNuLw/TEfbD6Yt72KiIe9jghLDGrthZMAeKD+opKmBhrU2GmwtLWag81dritGH6ch7Q3SvY/5qsRoIlIuNhJUXmKTDQBsvB9WU/tf7P3VCdLT9YWoY/X+OqachRjBBqruCSfywnzbamHlDJHz9/1f/X3aQ+r0MLaQQJB0oiAdpHv7/m/aXXbIku9x+jgkVM0OBkeruYGuwK8zaV0xPB4yF6PjI6EnY+oHd1D3ASF88yccoQ2tXlLM1DEPyta+hmx5JrxHe1sbI30zHyuOnDBRt9LcsASKp7morgRKDW2B8/m2cnzL/GEPfw9YN7GHrg1FWhSA0MCgMTvmSjyuSAM+CYGB+jS9c4c4wUrjcz7XEw3HCagg4mkAsx/qR4gMt/Fe84EUlRCRJd5EaFArHEoS10FAMDIlGEAsQ/kisEBcM7oJyJsCzf5OJmYKigrkaGDQa/ivwiLnucDhW2BmF9Ia/xMHxWFIswu9l4wnOBC/83SySKnhPxwsPOB7vjIBnGBv8bVHUf9GRYwbHYnAEYQsMcWwzgBuOhmExpDjHcyvYBXB74VDcCty/o9le1F7UWQQNR4lAURgvGMlGHFwEivGwF8X90mAv6i2lilfGe/igjNVcdfkk1N1cjPWkoJIWVrKmVtYIeVtzI2s4GoGEeVnISribGIpD8d42plYQvIWuGIpPQlvSCIM3x3iBSRBliHDVFTO0QONh4mBpL6iXujQWjnf2FLO47uwNc0O4+UPMDdXEXE1wsvpmmp4G8rpefi6uEmokURNNfQsDUz5JTdLNHeQgxI3AYbywpFlAXYRJxnMLcXs4+zqRfMOtICMmJScmFlRIcgyB9J2EDUnWEVyJWbIycpIFGs5QV7jwTxIOgyIWkYSESULKEjKyclJiYkUa2sK/Ocx/OZ6YqatnUWDojCcIk1YS0gUJhxHrtHFIIS4JMS5zOJbUSEhxicsoiEkoSItx6Vy3yNIkfWNinYWrF4lH5h88kgqSsgpi8j95an6GAB7/txXCaigUxkfYGIdEINHEdMH8vw238MPCiblID5KBom5YOCLLyhnnR6z4zSjE9W9xlkmKSmK2tJi8jLR8NmkeZmq2xFI5qLyks7SzuJgzFCouJwkT1rI2K/hL0MwZjYDjic8hfgQ4PtscjiNtiMTcP1GQaaRlSBxTCuDGe/2y18nlr02SW0FMiPt3dDgRMP+/b/Br9dwhRTGOZGArB/nx++NpgFFfW08bICMDADLSBRzPkL3X09BwMjEz1tYz1PpN6ABIhUIMADxILjLTUeeysQVzUU//lvhZnKF4LPDfC4lra/Q375Aw8H8vNDA4Hkpqv5MeXhxpcJLKcyR8DvEbX/6JIb+x0k/sQ8ASSNjkJ8ZZmGmQMIyELyP+gSH/wFAs7if/IxJW8kB5Qf/YDZyGoy3NSe1J0sMG4AEzQAdQ/zn/3yyYDACQ2wQA8ug/fZAnAFARDAAXxv/08T4DAIb7AFD+9k/fH59gnXHOv7ooSM8JFxcAWH8BAHS2AHC2HwBO2f3tiP9iG9cv23QADOlCACgATurRA9AAFBAhIQlADBAHZI4nAA2A/MSJnzepUJBuypOUlBQUlKeoqalOnj51+jTtKVpaOvozjHT0IHpaWkYmRtDZc+fPnz/NwHyB6dyFM+fOn/uphIycJENBSUNJSXO
Ojpbu3P+5HDcCoJPALjBKTsYNnACRkYPIjlsAdtKUyMl+lb+nS0lFTXGS/AQZDYlsxgiQUZCRrKc6fYqagvaXB06QpkAFoj5z6az4yXPcaqZx55mYJWQsrZxjBwaHeCSl1S08ve48LuU3wxPSX5fQEIu/X9DUMofA7t7PaGom6WQl+6njz3g/lf7SSU0iSoNI4X3iBDUlFRU52d9EEPklijPipp7EgXNqZs7plGfPS94p5uaBxDR9l1D/tHY8DpwmMZPYQIAqcAxYDIDIQIygXxVz23KH+Ah/mYt2syNTQr2OjxT9ejb37rV9EUW/5CDqdXAduOQYyJPqRR4DZ549Ca/JGxeCfa6IbFPCMAkKJMICbhUojvtVqG57cJENTH5oLk8NHYpj6XQijwiuv6lPvBt4aPTB6NDnqko2u7y1/K7X0ZqRw0qUh41T5GX/bpXSrZHgnZnQ/JJvxm6VTg+++JeoZG3lBe+khuZJLhp3bvww0inPF20/pwxPAwe4vTnw1Bi13XxRZi/a8kPZNs0swPLNgbHGyJNNgbndPRaO8VcNbCab8UHI5TzatVYGW8Ro6gi24Wz/pkuQ3rI17ZoygxViOC124QvsqMLCOfka2vGBQI9QyTczkH5nJkPEMjFyBW80fbR5kZANarwX+kzti1ru2xys3jNPrC6ipfg9bdK9CajMg8KJ7LdRaJ2Iar/rquIUJ+KbUmLFD49WJWxBGs6STHwlIpICrNxdjU2XCMLcrOKxr+KFO7kwAz288WfIeJnkh67SWfSeRVpoaploalJoXvtTWUaYlhubfD24eptwpt7w/pLiLupebFyQ0KuaY8BoygthmdwcaO6I7I6kLnla1nNzGi3/cczww9uk536qC/DiO0c+62ml2TL5HWFl/YKKO2OZLEWR4Sz3+DNREXc203OUiO08T/ref5S0gUC7/derGpfnrYPnexZKLmLe0F8h0DNYDaUJP83/YExX6ylHOxLAyFvDtpCFF08vYCukXHFpDa0Nbg5OPasQEPbdnSa7yIeBKaXC10A4Oahc083KF/lkUIWprIinS2y1b2BK4Uy+v0GUP7O9Zc/b0D6WutCC2YT0Yv1WZMY9BMe71/EydT1jhWO9SGp0OHsrzKsvZIjNdQh867pLxih+Hrfbw39tNoWPZjZlRrcHn9ZU7ZB8LaL9psuSdlfJ7aEnLDvhqV3Q77UaNREd2P1LD1mUHSgTepU/8fAuteD8TNGDftfcLyzgqU7cMxudYt6TpE63zZHaL1wED9qlO8SoPap+zzYhEbLxIODEO6ZSXQNqR++TfW+xtPChsT3/F7TmVzXalIyG/GbXcl+PJnh0rGc8LggwNlqQOr9waJmxgWjs1oJ8uYE2TmzuH3on0KxF5pE5MzgS79uCgI6TsR7EDhMr/KcI8dnhNjeuwp3TY9pIR9gR3I+PZt3y9nafNb+zfdiaRCvcPUVzX1PN+l0N9Mtgb4nL2/AytuCpRMc1tt7ddp4H73/MuxsnglwtI3kiP+oba9U8wZ1p1rtN1ymhOxq7d1vEc/eOlVexla9Fz2qq7S3JKysgkPuzfwsm07EDIb8l9zwFDYqIAP4Q74gP5/KxeA5VU3LPBdZ7/TnnrLJzsiEvJ04w0OVQJvGwxIAG+sRr6otkY8XY19WNGxA7FUEpxZa2gWxylkpF8UZsX/14A8KTtyE3+Fq2Y/wsZ0Xvp6fmzYydItrd53B5wFrlXtuHXEVyEItNMtVlGO2slVNyOCaoaNtufByGM6jOlNidql3NU5ot2nbDPQ6aoku+jH/gQ/ejJNk/xlPszK2dvCNFdmuGqDz1CN+cwbyzN+7Wnj4deXmcHNxWSsqBAHn/M+Ws97uWqXuuN7/+A01xRg0aaG+mn3cavx712HY1O3TlsXGozUFhTmZS/FZOD/VQr1M6M1SIy9ukb17yVlPa1Ti6o1tixj6hsz9iBupyBb1zp+fvv7NVixkGq1Ind5xscZSOTj83iKPbwkg+NWr
GDy04qAyiAnPfBkmpElL/oFWG9q98mf51x4B24VlMQ1V5/BGIeszKQfU54QUtNt65GbIGtjsDl+GH60ILaKJDKlIaAdlLBawHjaKJA2d3N438mgcmWlQ/cXzLDgltHaw632mnor+kqaDIwsrw6kvfBLpxHdxcfgim9vsRN/ImoszpvtcxkG0cr0LjxQ8uh/QwFGk/82y3fhIJe75CWZiyrIiLGQSPEivdUx1GJgtecwt5vStSMdENC7/4P8LiT2W7StkhV2DapXcU+7nmUEXqGEg+Bt6muxooT9cftbpFbO0Yx28/uxl3DKjT0fydLf5bxeTXZFouYLGr9nHwUQX2GOhVdnVXmi6callMqT4G9CU+zGL2hvpdXcNuh4Wd/M9VeMKV2ILgdps6tFFXmTcLx/Tg7KsNQCEm4Da7Hhd9bu6hTi6KY7CehwUs4IXyXqntok5KipHoD+JiZkjO3/GVntGaRaTqvf7ykLi08uqQnPobPmg8QDFf9VrT3uRH79db4O12OaG9tXDHyU+3Tm9S5aXfizUzflAozIYy3xKJF5vfuq/6eAL3quqw/aZDNaZ6g/NOQvBIvaT7o/r49HTYczN3n5IEichO6vJMvri6R3OMFckJVQWrHD5+5SqxxY9HvL91U+z0nO7vGuzkielpLUSyMqqFv7wE52BIpq6YXjAwabr94sHrg95tcIw70/OFyEsNcGBcpeCGyLcF3rilstVIrO1XXosohFfolwuqdkkrQ2MfnPeoGEbn90t/U3209LZ8bG7eubdZVb+s//XQWPlbYTtcUEeDl0dS/FUXPMS1iawP2g32obgNvsbjOUW/+Nh1c+jimDHN3e5T3MKw/+R2wUBTRdddlYBStY/9Hm9OfbWvNdvhOAZWh28lBXuJpHJ2REpuPUd/AoFMf54U6K/KR/XRIji9aTOehw4nv4tW0pKkfxsx4SP/Ip19+sdlxTildo9b2eUbraibK1ZBaoaXkUecX9OWCo4B4sMfPt/zlq8Er4GY+U5wc3Fz/1t1KuFcPXXD/ibn/CcHg4SHTuQfD/eiNrCFz4ead0FtN4cLJnM9nSxMBgKlh6vynycNfxnOvJqVxlIRIem9fo/+kCKJ2GV8fetjxoYLj3TLIU3qaKWgR3f4qJTyo5WrLE/6kj8kFrRfjihkIHjMeTIcmIg646naC7tHdpeYVM0/oWckyrgbImponxWDXVY6L3wqRnLmIHC+8l9qHWX5D/jpmeaFZpYhE+58uA93hCTFpJoVH9Wua81VdIqseyslZ63kWBj26b+6ePGbQPaim7J7hhdTzsiOoeO91MlyacpwSetQwa88wpdeowan25RXblDm033t7vGp+/5Nb02H9fEbRNrjwErac9lJV/h6HWRsHKDTIg9tuL10J2jvJ0cesN7z02P1ueLI+J3TAqnsMLubdGHpJnUL/3MEdVTP3OtoGNuRCScPujKsWcOe3IZGae6Men8phqxU5HZHtw/5fSlE4BlLV1kh/Y4MP8q1lvaO+ZZBsIKYFOFU8RmxPtaptzdKXwBeqOoKk8+ri+EmUFw/bF00tXTNQN/vYgDPXNkSQejSNLVikbgnb1LC3ezwnPtJYbKfOlyErnPkd/cvkX7S2YQSy8kYxOs+Z5u/mlKX71vA+Vy1yHtTIGxA+9xTbGZXfM/Rdwlyae2Bcbhux0dW+s8TDS66ayXdoQ77jU6TOrRSbeS2c2VWihcX/Woj5yeLbRO1JMNcRcU2tgIXHtc0JCQuT2VfSWQqo1NtPpQ8fNywLm748k9ghSuzHwju6zsHamp1XvsP+8rd562id1aPDnV2K63Ndha/MS7kGN/PHhMFeyW6v5eac5bbBk3I2Kph3wOjNXHo2Z6+JMVEbYX5zGwKPDKAGY7RkKa13Bc4mNbjYSP7ARogvHIRfPNQlPnBkz6ITAtmuvz9a/g5gddjq8QnTuDn21+XygPmXKxHc4L2unP2Mrd1zDpLTVK7EfXa8CC3hI+FSi753fD
ZlM1SNKycZi7/aFS/ONVbrmyC2lCWNTKSncGXl//FYtQHdgOhL+nK8W8DH3XtjYg2GMNLPvJI1JcZrA6Zb+qsRD6Pdbt4K3QPsX/11hzk6HR5wK07VWrHQPXz7W+Mzb5IbLUI7xzSrad0z+DK/XiU+5U5wa5r0VUZOnXLSFeukjj6afko8XmynHI2w562zkfZAii7agPbxRb7naUIw3qVgj2m9Brt4euptNJzZ7Sk0GS9AWyy9EgKRh31U1/sgzX/LZNoNT3tlqWNTB0ogiwRS548vlKSioycpiquuTNclYnT0N6vLD1xzefMRVleBllLa3hN9fhLaXYRRaOpZzMTBRLWHSxsh6E8bE1jstFnewILpj7cM/BR7m3Ui64XdnGiazhf+e3CFwg0cXx5afei02cQz17dZAsg30juvcFSynmq9FnWu/JaofvKratzrmx9yisqsf1eytYffuak87e0+wQK4g5U9Nv5jYZoOnU65pcvrUO/yIb5iO1OvH4lOB+XHMCXs5hkyypaEPttdukzlHw65Np79t0B+qCKDcXkm260F7m5RPk9EY2K7K2qNBcn1WJ57Djvennb1F2zRq2W3eSmCByoC5GkvY2MVsStmH1oOGn/0TbjVEjyqDVLXLix28P+2TLypwtq0pBZY97RFYNpfHXZPj1Cfca+r4blgqKO3bBL5pBNr92DBJaEF+yLZkyVxft320tDJbTlUPPKhSTP6+LtyZAUJ/KkSuSFqXGn0ZmvMZ8EojUMTLquBWI8lV7ZAVSlaXz5iYSAMOg9R9vx4M//MzdTUyHE6emTEE/ls0Xj5Xge1hrF69St0LnSuSrLKiZw8hXQ48Pc9dh2akEh7zvnrgc8KewcOUBfMPSA8X1+IyNV1Zo5tcF+8LrR+Fr/x2Bd/g63CR+Bu4c2xQNPvaqWyMD90Tq8MWFP/ePjy+68XZSJlgIWmI3/tbEXsXBGvLtUGHsFwlYgdqGJU1nw9bUZvXl2W96CRIKK3vk9VNBmMHa+GDT/Zac0dqyglNjTmWsIIUZBm8c1QpO8b4N93J8a2NPqUSgStW6USMAuLL7LKNKRpmXlSw60SpJ/UBgw3kkGnGlPnAIvSGetpzRy3REkrXraWXQW82Kv1bD4WIEkO9urrv553Tcywljlk1zRak4wNc1+WRTyYN5AYyrVQKVdg73WMmdONN6fga8AOjljv/1mK7E78nOvuHxLVZ/gVsnyjQ2D8Eml1+3r1T22QviJqvXEL5lpNTWZebINCxO1BZT/PAnpuLHrPRiUC2BBrZjFM593SiivI77q4Ap/0lQwH9Dh+qHPlTU1686CGmfGV9+UwJJIiUC7hS7w457UdpTLwaUTN3zWwVshlbIbamQ2VqZvOERJZylzm4YHTW75fMnPHxXsQto1Uu9+DicvD+gZKCsSmcNSfnrmJ3xoYP1nUao1PW5NWoZFG89Yz0y8Lu8Y35au7cUn+oVaqUZTkxXzVJY1FXI+ckS0z8XXPVxN95ftULJClFatlX0ueFybdbedLllpNpGaqpKRN57sU4Tdw/jS0yBsEg17/lzBU1bbRA8J1p29ProFOBoy1MvfyEyNnuphnEJbt7HGs0pHDdt0SMjlbb723QeqWQnzqAFdMq74uqx7w6D+3QS3Nad9Jd/HMo8/OZprffOpMbZ544B/mZyzZMcrEWPfxoxiEIq4uX/LXtRyP8d19bFclEPUNzv2NNYmpecyKd36ghXV8ZG75mGb749Kt80GPgp6ctRwjqb8oiuUVDHkPsV4pl0bcZcPBsvbh6cUr/tp+VL3Vq3Flh/us9c87LdSfjTD9KSmLXDFLHBLRlAKO3LZPNXANKSbN6ZXOFKjwYTcnbx4lbq/iGMRrPjZiOcddG+oZNZXIO5pSRVSy+rPkNli4+HnX2bEGDrqZ2sRozkWS8qvDfrf9tOb2M4dufdA0zHXheeusBhRUyv4BHdXZWZLwPlCg9tvem6kiil5byj/sLutabrHoUZRs6H
G6mf0IQksE/HgOUSbg/OIdhQPniYM3xVKdUy5EjbP2Hl5+eSPkIDl+qC4K7MyEdTrQ6qEM4ePgzXAcrThrJ9rE5RX+N76CeDsZ4dTDj4HMedmvvcFcUZWPVkvvXXGkTWqRiZi52t9keh7y0I+t1R2Tx01BCHVSeQddH3ECax+Vd8Qtom617ndi3UP7esvkbLe4YiYyUsmiJ6ObWQN3nGknitzzBqVdb6nzNvXOvuUmzht5V4R2HvpRdZ9WfQMOYPxlQDJD0naB4SP1glLQvJnp/1lHxZ69tfKcnp7sK4NBTk6varfrKzIXFaScjtFPDuJOnvyGAhopKVWJ79DgWbwvaLSPLCfmzeU7usfecVWyTBGtlx3ivzpFnvznRWuovENx5jhCoJw/QRrzVf51qFZ+V1Iws31gcn23W1WzO7EdpTGJ/x5aSrBcu1HGW7lLH3tt1pA81wrJzcYrZOHSk/9EMzY8EfANuUUTUG4JlJC0AmLWp5pgvIqDnfOBTr2BBDiZZqkWiOPgTezJyl8GA10uaILulJaGLqqRbeaj4Ho8vJSTBKnk25c2zv3C+/HL7aFlNLdJr/BvAlC/f65bWK7Pok2p4m97W11cPNiJLHuNFH/e7v4FHFYsXCf6a5CzghZ3jHAHHxYbHDonV9p+w5U5+Zdyqtp7+VEW56knbzltHopYlBxk6zhGDDovrD7ucqyfMFHOQk29mTfo2c7FcG5/XgxvMzOP3kzbZU/YqBmE1TsaJ3uCLZMMmr2ExrmL+35eNP61XrNgfpmfdNIOf9BQ4y3Akh9UdgpcrCtsn5iZXm3SsmAHfLRe2LDW7whodZnucGh5mHott3QQdaWbe+diekVjRXmF8n6V5Kum94UVKiMajnFJjN074qv9LztbJ6jQE+HHe9bFV1C1KV8dGPMjDdZ1uaK7YHjGUIbp8lbEYWU5BCOUtnTFx7VWj9LFejiUSoRo7cuTS3A6wT4L/mcBTeV+TY9KrCmhHw+3ccxJ++G++zJ9KwQ/ij64tGoDF66aQVJpxixPZFxsOhwPm+zgsGLilWAIzU9uvoS3DQqsI0/pOOdUidPIs3HhGenv25sFwjpr+XmIk7hyq3CknpfNDx0IHq62BuZCxtYfgpJ82IzL7kecxUyBMt/h2sIejple9FP5/I8dSJqJg/7rc40bcaYvjaAGnodguNQvMq9pamLKmkNTxR48IyqkoClqIEuUFxlqv5kX71g4VSsnPis1X7IChslzNvTCXsqW/apBgUmZ0gL2vq6YHq+tLw8XDVrA5yB4vmCDCtQsJ4JfoCypS+EMoC8b+4/44v4WLvccC3PqX505EmSZtvSRvjHr43uL3Myw/0D01oRqjdTOhiOAcq90Ic7tGd1znL3btQ1ZSYZz12UFegzZGUYtVqtb17TkRKhNZqWuT6vrPaAN/Hb9fOVTSuuiXJMUUmpAjWbtfmtyAOqNiWdtO5oTW+XyCzNbeVXoWc+beC7to7Kz/dulRdBi+y6JjGD34amRgZAgQekBGVeeWfXWuvo/a11ppHWf7yMjoZvqMRuxTVXwTnOjnebfRFhdOr+anoWh6oyYr7O+0byHM42IlJ4SjaUGU/lQ3ev+86HZ+wOZnLvngl7qobYFshqT1pgxhm7wS3LIhGvWyX5EXLsfgaZwPI7iJXfWAwjS5/IoqGVAVm2u+v4mLBfTI6Kepvg6XZhBUtotGRZ0At1mnmRG4BW/VzkbTRk9nuMvJI2fT2PgU1VwesniolR7vHCr0FWrdd7M4UjkgdYqa/Qu9s7jEu6rHhzTsYHz2iOZ1IXXakw0Mp3n11yXp3lLYH1F1uuv7aZ7KwRGShyH2uw6bjUQlg1STwGqHvNs50bF1GOdmd9ws5tyAv1jk6iiqEFrMt0qVTxoXqsYBYGnnLPA06rXbtTR5fWRtggnYs+X0+5lPXyvRSvPopklQ0PWVW0b2/iTa/LlNd81dEu3oyVncjzv1qfMMtvbOOpqpxdXO0oC8FaR6o
c6Qfme449mAFxRVvMzYvLVUoXzS8a7aPoh/OmGPbebMmsZMReCb7FWOD29GnKAEis7Q6Y441g7b53cobwdmwCbnbVYj7jk0rhSCCs0c0d9PXRdTYRjTaeHmiAZ3yZeQn+uvwdV2do8nLnAs8NGVfFlz0eD8mtj0Rp9yEJe8vmi3+wnY2qBNMLlNTPzUcr3lMnZAttH6t8P7Ti1Xcl71h/BY/bJ2wORQEAHPSd3CPc+2Z2uLv2oKeUvW5L88Q2h+XDHrz8knsYMRTR8h28xTHDNjTyYv5SEHMm/qksZY4jZ1yLed1lVhqX4OfT/uQ17fvDl6anIlS32JyqbVUGNy0C8FTQtQFxFavU4PSQjMm6th+YFs8goTSk8r+Q0+xT2ZBC0+FSsWG7hMqpiJ1I0eexywJ/ULsUhs4+M8fV+eUCVWnMNfIcgZ4KUc861jU2JwOBcSf/Sf6BTcd3FcZmcRUv18L2bygdvp28nIZ2PRwe+TpcKl7Aky3qnqeu5CNFIONGpfGv6VT4OitzHAMyzVhVxR6GdOz7h4mnHVBBF+5Lu7+SkGHQyUqTF8hRwek5eLZ1QN0v9LdJUZUVsALUWLKbCSlDpXs/xjPW3KAwL2MyELMw6cQr6MOoKvilYYLhSH28MjcKdIErlDouGmTOHL2WJOGXEdF/nSagOmD0iZpAJaDHdeJ3crEhJRewaA9OwkV6Ce/YTtf81IfyZnzXr7Mc5T+yR84mZcrnzs/Yyg4Gd5RjyMQswpzNVE75rc8zAoAp/9xImUArOOMjbDR6Oyw88uV//pftqvn3Id3x6E6ZhOaujakFk7DUw2Mg7ytf4GHXD0SnksD3nfqu0oz3yxcGjjb6zP2m8gIef3kb42v8aS0obvvqQBrv97TmmX3BwQfj4rX4PoO+TbvJIu3A2De3GMKXXlQ5PtoVuH/EyTxmFWc0EDGbinP0yZ367C85l3p2J9AJbvjpx5GosfpOse944vtUNbzgtff3wr7Q0JF7cT0sg0KsmXJzEmccNTVtTI7H/h/iwd2T \ No newline at end of file diff --git a/docs/cassettes/streaming-tokens_96050fba.msgpack.zlib b/docs/cassettes/streaming-tokens_96050fba.msgpack.zlib index cff0d098b6..b9999a059f 100644 --- a/docs/cassettes/streaming-tokens_96050fba.msgpack.zlib +++ b/docs/cassettes/streaming-tokens_96050fba.msgpack.zlib @@ -1 +1 @@ 
-eNrtWUFz28YVjtube2gP7R2D6alDQABBACQ1OkiUZMuVREmkHFmWhrMEFgREAAthF5QojQ51e+8g0z+QWJFSjeIkk0ybpE3PPfTYi3zIP8ilv6BvQVIiK9tKMs4kKcUDCWDfvn3f997ue3h8ctrBMfVIeOfcCxmOkcXghr715DTGuwmm7A8nAWYusY9XqrX60yT2Ln7jMhbR8sQEijyZRDhEnmyRYKKjTlguYhNwHfk4U3PcJHb3+Z3tQzHAlKIWpmJZeHwoWgTWChnciHswRfCowFws7GEEP7HghQJ1xJwgxsTHXCihOBaPtuFJQGzs80etiEmarEssiZuEy4bwVIVfymKMArhhcYLhnuEgAlwgx1UpssmfEeL3TWHdKFvCScIMOld1eV0WDsUQBZkAxSi2XD5sY2rFXtSXECvI9wVGBJrETh9GU+ZyEYphLpBKM0VRDGTFzMO9W2A37mZXAxPAci9siUdHHDmw78XY5kb2RTn8gShp7mCLgejR9tGpi5ENi/zx2CWUpc9G3fIBsiwMXOHQIjaoT99vHXhRTrCx4yOGz8AVIc7QpmdtjCMJ+V4Hn/RmpR+iKPI9C/HxiR1KwvO+6yRuyfXhM+4pCRwdsvSzadoNrSpYMr0wsdKFIAoFVS4UZeXDfYky5IU+BIXkIzDqJMrG/zY8ECGrDZqkfoCmJ73Jz4ZlCE3fXUJWtTaikjsqfRfFgVH4ePh5nITMC3B6Wlm5vlx/8Go5TVZVufTRiGKOKH0/+yln3x7564gSzOKuZBHQlb6tPBuQ5eOwxdz0qVYovRdjGsHWwL8/gWksoU+OwTH4X/887W+Rd6q/HXj0yzd+dTwLTkq/qLtJTsgbQg1HQl7JFwRVK2tmWSkJ95bq55X+MnXukwuB4X02gTv8SW8zTAqwMWOK2VTCHKn4UT1GIXXAUXODoDi13CRsY/us8sJw+IKHA8DjeGBPSng/IhRLfTPT8w1prXdaSAuzH/diTyJxC4XeQRYb6Z+5l8EIL/ykPwy7gauExaWApk/zqv6sPzJwwBkAVSRVkRT1M741LAg6bnhEYgCGLTiLWDe9yAVon0fclKbqmqEoyiScH5af2LiWNGdJAGvSSSGKsU+Q/fm+BEcB9r3AA69k3/1zDgJJhcnKp9clGGnjkKbv6Urv849hkRjzFTiMS0XHJfj8/cVCA10FLlMy9c9HxcBHV3qeGgH99Pp4X8U7Cj3fHwhLnp1e/BpuGnnb1LBZUNS8biLHaJYKdt5wFL1QLBnNoq1/UJmXKshysVTLwi89nX20PL20UDmrge4KIW0Pv/X8zk8bDctpNIOpaONAtoukTb25B7src3OO3+gqb2rF6vx6frGzM63O1Hc2F5UHTr0tqWbe1HSzmC9JqqzIqqxKrc6qe1/Rgkpjfaf6oBVtmmshmw1mq/kWRNxaqbLa9vfmDI+tzy9uajVnYbpgtlG7PTNn3ivsuS1TPug0Xa+1R3T2aNmQ26sVPT8N/oRMMTUxKUAkwjFJp/obRIINIvHtUSgrg+0xKdhZFEzJoyfjpHAfElk19LuTsK8gnDD8woFd8xieWiYhvvgTcJB0PHuq4dOFanvVbxobu/pGfb1SNPzF/CbaXVyrdjc2cbWz1FyJO1ZiLg2RUMyrktLnwVAKxSx4rkz/llb9ZUMa3u9SNUtE4MeQ0NBznJMajmELpWeWTxIbDvkYn4DP16YfpZ8UrZKGdKQaSDXVkmFJc2+uZTn6dye95PP8Z/+2EUM8K3mQe0Se0C1I59L0jNfVE9M8eLjbraz6syWddvbrwd598BkSc4OE1JshX5UAcnawgIAFBxHj+eyKn9wgm48mc4nHLsygXQq5u+GAWTiOwDpQHya+D7pc4lk8jUL+9kIb74tlJQdp2WdILB/2qwYRwekCRyxMy10VHD0FPP83LMjb/6ujBxoGGlWycW+e6ZvgouW6szkTLc9vBKsPZ0BZLwkPVQxDBcOgXrgsF0QUt5IA1oalREj
Y2znI7U5Ckd8z5ign+qQFp2GTDqwDxB51G0AY5Sozqe2ju3d//J55Ke/DBA4TdriVcXZL0qtI6pWHtzTdQNOWWL6Npptp6r+E3RJ1E1FQZt6SdCNJNXTL0s0szcOLjeVRi9xydfMxfjRuJN2MdZjGIcyPZ6vLc9t3777Obt5P3n493bycMDwzQ3UlMlS7CyMh8rKWHfetcFPlfmNnbyjOBF57ZpUV1A3CljiEBo60qw17GY4jcMTHW2KFv391hT2PuQLiHZjQwgJxBBd5vrwlbg9TwiGOQG18HUC3HdFXd0S/fOMXtz3RH1hP9MTKWkzpxX9+4B2m76D3c60fbJjaN+sH//LV/WBV+X/pB6uKOob9YMN47f1g1FRVzVILStEuqCWtUMKGVsSmmi/qtm44xkv7wa+hz6ibtmO9uM/4869eXpUZM7q5GUw799bbheX70Yqm0ofNINmd+1ZVmaZ8H31GUfwu+nw/FmauaKi7WBxT6MJlJ2VM8fP+yJhCz7oeY4p9qJcxroFPxxY6VGExXPrd8WUge+0f36TnMXdswaPxjfqsvTW28IkzttB5O3NcwctjBfxr/A1BGYle9AfEfwFXqGb4 \ No newline at end of file +eNrtnU1z28YZgJvpzT+gh55QTE8dgQIIfkEaTYeiZJuyKcqiJFr+GM0SWBAwASyMXVCkPD407bkdZvoHGjtSo3GcZJJpk7TpuYf+AefQ39IXFGVJY1frRrST2i8P/AKw2H323fdj8WLx/kGfJtxn0XtP/EjQhNgCfvAP3j9I6P2UcvG7/ZAKjzmP15qtjUdp4j/7lSdEzOdmZ0ns51hMI+LnbBbO9o1Z2yNiFr7HAR0X87jDnOGz6IEaUs5Jl3J1Trn9QLUZnCoS8ENtJ76gClHusR58dFgqFJsIrs4oasICmu2ScpqoD+/CPyFzaJD91Y2FVmBa6Ed+ticXCSUhbBBJSh8eeJQ40Kg/PPYYF6OnZ6v5KbFtCofTyGaOH3VHn3T3/HhGcagbEEEPoW4RHUMYHfYojTUS+H26f3TU6DMSx4EPNYTts/c4i55M2qKJYUxf3HyYVV6Dlkdi9HWVDyO7CTWp1mfXhgA1Uoxc0cqVPxtoXBA/CoCSFhCo1H483v630xtiYvegJG3SYaP9o4Ofnt6H8dFHDWI3W2eKJIntjT4iSVgqfHH6/ySNhB/S0UFt7cXTTTaenM7MGUbO+vxMwVmLRp+MP+bG7z7765lCqEiGms2grNGf9KfHsAIadYU3emQY+p8TymMQFfrbfThMpPz9x9Ax9F//PJjIzIfNa8c9+u+f/OzxEnTS6Ns2dWaUvKWskEjJ6/miYlhzZnGuUFCuNDae1Can2cj65Jki6EDM0n72z5GkzCsgqAmnYiEVrlb5fCMhEXeho5aPheLA9tKoR53D2kvF4dtMHKB5WXtATDU6iBmn2qSaoyc3tfWj0aPVl744kj2NJV0S+Xtj2Rh9nPUyVMKPvpxsjhOWFQkn10I+epQ38k8nW4474BAaqmuGrunGNwMtAQ6BH/oAc/w+Ga7Q/6YOr69e3EPAEIv46GOjqB+9/nF6n4SGUJvs9Ccl5S14/f3lez0vrZDtZJVL35zdD+ielPQoH/KvXtw+KeNDnT8ZHO+s+c7o2S/hx45ll0t2yTTKdqdj5U1acs2SYVpULxcrNqHG19CZvg2lZL0XswR6l9qgoMRw9GwmJINs2C2YRtEsQVvnFT+yg9ShrbSzxLJG8HklTmjAiPNp7bJWI7ZHtdZYAkcHS9ur1Ua9dtiCStYY6/n0g+/e++nOju3udMKF3Ua3lW6t3GitDLZuirazWWlWB4NSwi+TJbqy2ee+ZXZWQtqvL2tG2awYZcssFDQjp+eMnKHdZ+0b6fXFIejavYK15CVJ5ZqRdKxrEYtL/PKV/q51i+419EZtozqs3Lu/3S6X2tFS2/N7g+3LV3upyXv1jUVzZfEq7xXK26VuunO9C60hw
luYnVdAGH3guzAZIxqMEQ1GSF6f049HyLzijBks5M4qx3nlKuj2ZhQM55VWBpPCJwlpC5T0wiqL6LM/AoO07zsL8aJzfam7dL3d7plrXrUVupuX091yjtuiV22mtcaSZ1dXr66v6fwUBEsvaPqEQ0kvVMZyeFL171mrv9zUTg95rRmPrc/oIGI88l13v0UTGEWjQztgqQN6PqH70Ofr1e3Rl5ZudQixy2Vq6AXTcrXl9voBCUCY+vboC89cUIGZqc4rIVmolAq6PrZpv9nPhC/qfvfzuw4RZE55oPqOOqdmBtAG86dV+5UKrXeu5Is3mjVWbDR0wrteXtTzeee+OqOyzj1QK5MjcicmMzdWPLCDDYpKUCjzBN7MsQE8bf800ArAtKwZFTgKrE3ft+mO8MFmzqlg1EgaiGzDkAsa7rhQZ5rEUPXs3G68U85Tp0w6xYKdndNjcDAYabDRfuTQgTqnz0AhgSDq3IOJQVYJaClQ1VFW7HNLrsKPhLopJ1C/KA2ChzNqwLqg1Tr86I8ZFU7uc28HGgaWcbLX3YeXLr07BE9wtb2hiojORaQ40FpkdD4j4VFkJGEE/jgykjDiPjKSMQICiAjV0YXVEVQ4FdmcBoI6D9SvEZBEku5Ed1AlSSAtUpukHLWSTJbQ/ksR7ULMSzEkkfoADBFJEGUzyQhJAomgbZMhokM0bBi1YdT2BhiFDL1IKaVfIKCXAJIzUblg4BCcULm91Fxdvnvp0jTTMMQrpGFwjyVCiRkNMRnj3UvGKE03GaP41iRjmJX/r2SMwtSTMSolxzbMUsmyK/DmlDod6pZN3chXnEKhUq78YMkY9eV7nSuc7W3sbZntbWoN3K1iP+zvdvKb1+iNmpk6rLSY1FLd7Z3kIRSfJ2N4mxvr166bm/Hydn9QjVbWGjc3+w33esmuWoOiWbzW0lv33c2wvbLVrhFD1G+tr7U3B8teg3t73Wqp3Q2CW429fq3rxv62VY9zi25p91WTMYpTSsaobtrN6pXw8pYg1ZvLBcvruDy+Vb9lbjSCmzdcfzV/lQ429nbs2qmMlGK5bP04kzFsQvOFgqlb/2Myxu8//+8+hWuksbdp9KyqWfcXm9urpdLK1vqwFn8/n6L47iVjvG0ET3DVpxQivr2EFJ4iIwmjYFpT6G+zGMUM4gmkJLnQ4NGEIiWZLHnEYbsoTTJOcUCGCOl8SDMISCJFyh10ACSMWsxFD0Bu23we0wTVtjT7mUQ2OgGot38s1/beYkbjixwISZKOgbZNGpHsoqctg5RDQOhpX/juOV94yEiWHNanCaoj6ewIEjqfEEsxDnmVZFUHGUkYdRNiUzcNEJQEVEAJqiWM/NGPfN2MNjyKHpJ0xpaSPjoA8nm2hKVdjEqk09pZzifOtqF1u6AcdRhOAbwCpADDEgzdpqC1p3bDKs5sv9MRCcYk8pXhkND5hHzeQ0TnI8IEm1eI2HZ9YeNgw0jk4osw4GCTOtldRCS5fYTtIiLURTjn/7qztP0g+4KUzpckqKRALxJV0jQSkPEa2yusCYuX2HCidgo5NgwR4TQtOpG4YsQP7kD6jkMRkwwTEpLlH7MeRiEYhWAU8gaikDTB5GyMQqZg1DB/HVU2hiGvP1ckIXFMUR9JH26CkoTrREzjATBJiFnH8icuuIhI5kQiIekVkT0M2KQ3iqKfjdP9mJX9Zu4U9ROEhItpXIxQmqAQSRDhZL/U6vs9jNWkz+tKuW8jJZyEvOCSfowJD8pCUChJF10cklJMO0Yxwqsir5tRFQHJdNEwREbnM4o9FuEEJE71XzwPkpIE7b6EUofiEuzoHF387gf4DHBuBFPYLs7IDTBcw4trGK69fkZNzKhBw39Rbe3buCaUbIERdIykYpT4NHIwEMF82ouLUopX+jEOmdJDM5ASOkgYh+Blox/6/geW4HrieE1kGoIEm9E/QjcbH3L4ZiiFHXQjcTp7Cm4kEnoJITkTlQsWq6eo3F5qri7fvXTpP8g9u
4o= \ No newline at end of file diff --git a/docs/cassettes/streaming-tokens_c9e0df34-6020-445e-8ecd-ca4239e9b22b.msgpack.zlib b/docs/cassettes/streaming-tokens_c9e0df34-6020-445e-8ecd-ca4239e9b22b.msgpack.zlib new file mode 100644 index 0000000000..ec6465345b --- /dev/null +++ b/docs/cassettes/streaming-tokens_c9e0df34-6020-445e-8ecd-ca4239e9b22b.msgpack.zlib @@ -0,0 +1 @@ +eNrtnc1v28gVwNurT7301ItKFFigMGVS1BcdGIUs2Yk/FDmWbdn5gDEih+JYJIfhDGXJQYA27aWXouypPS26ydpdIx+7yKLNbrs999B/wD300L9gsX/BDm0ldpDUk9aKmzpPB31xNBz++N6892bejO7t9XDECA2++5AEHEfI4uID+829vQjfjjHjv9j1MXep/WCp0Vy5H0fk4Mcu5yGbnJhAIcnSEAeIZC3qT/T0CctFfEK8Dz18WM2DNrUHB8EdxceMoQ5mymTmxh3FouJUARcflFZEOM6gzBbtipc2jXnGQpwp4xkloh5Oi8QMR8rdW+Ibn9rYS7/qhFzNU9UnAUlLMh5h5IsDPIrx3T0XI1tc1D+/870HLmU8efxyQ58gy8KiAhxY1CZBJ3nU2SHheMbGjoc43hetC/AhhmS/i3GoIo/08O7Rr5JPURh6RLRRHJ/YYjR4OLwalQ9C/Orh/bT5qrj2gCdfVNggsBqiJZW5iaWBwBpk9GzBzJY+7auMIxJ4gpPqIdGo3fDw+J9PHgiR1RU1qcNbluwe/fjxyTKUJR/XkdVovlQliiw3+RhFfjH/9OT3URxw4uNkr7r06umGB49PZ2R1PWt+9lLF6RUljw5fJg+fCf3TS5VgHg1Ui4q6kt9ruxalXYKTg282Ny1ns+1Pbdc7zXht/lpzvr+2zlv2arlR6feLEZtFNTy/2mPENNrzPu7Nzah6ySjrJdPI51U9q2X1rK7epq1r8eL0QAjuTt6suVFUXtCjtrkQ0LDIZi/3ts3reKeu1asrlUF56/ZGq1RsBbWWS7r9jdkr3dhg3bmVaWN++grr5ksbxU68udi5lBGti3vEngqn7cVap7bYanWNJbfS9J3V2Xi7lGUW71YacbVec63K1SvLSxo70TxTy6vasIVFLV/W0sfj54Li4aDD3eS+rmt/iDALhaLgn+8KZDxm9x4IocR//9veUGM+aiwcy/P3H9SEgCZftbA9nsmZmXkUZHJarpDRzUmjMJkvZy7XVx5Wh6dZSeXxIMNxn0/gXvrNkZ5cygg1jRjmUzF31PJnKxEKmCOEdOa5QuxZbhx0sb1ffa0qfJWqgri16fUIJVVxP6QMq8NmJg/X1eWjvkOdqz090juVRh0UkJ1DvUg+SSVcNIIEnw8PhxFNqxQnV30myJSMx8Mjz4VvX1yopuqaqulf9tVIcPCITwTMw+dhZyVk30hRP3u1BBcdTMCST/SCdvT468kyEfZFa9LTH9eUM8XjL68v9aK2fFrILBW/fLmcoHtc0/2cz569enxYx0cae9h/XlgldnLwI/Fh02jbBZwvaU4hjwtO0bEsp+zYpXxbz+X0XMH4QtxMYola0rsX0kjcXWyJ7pkPkoNxH/XTLmfK0AtGUVzrpQwJLC+2cTNu12h6EexSJoywR5H9xHJUC1kuVo8kMNmrbVyt1Oeqf1xXT4qS2ggP+/RkL6AsII6z28SRuDvJvuXR2BZ9Z4R3q7PqcmUj+dzUzDZCtpXDuULeMB11prW8hzzRyJ6VPHWNKWUynzeUSxkfTZWLeU07tBQ/200vKuj84we/tBFHk5k7CrGVSSU1K5YwKmqlVy67DT7tzoTcCZp9IzSXWIcatrVmDJ
Rxhba3hLgOf5E9NkTZQ4EWBSyhAByLOl/oamn8uVk5aVVUIW1ChUuqXha/Ej14j1h4kxNhiSYVYShQ7PH0wIBx7G86os04CkXT03M74WYph+0SahfyVnpOl4ofC9MnLB8JbNxXJrVxUYnHkTJ5Z2jmFCSkX9yAIK32hX1UxIcIOzFDon1B7Hl3xxWPdoS2tNnRF+OKODlh7qa4MGFthqVu3R0be38IHuNquQMFEJ2KKLONGDCSMOIuBkYSRsLHBUYSRoxwLqoCThJOggAggi7pzF2SaHDM09ECAHUaqJ/cDG6CxkkgTWMLxQyUTqZ0BNwAucsdpI0DTBITRwGRBFE6CAeQJJAQ2DYZIjwAwwZBCQQl58DIp+BFSin9EABJxOjrD3/9CCCdDunrD3/7DBi9hpGcicI4FZ7lMZUbtcbVmVtjY6NMBOFvkAjCXBrxTEixD+kg72E6yNzMVvsyozsrO2tGawObfWet0PN72+3c6gK+VjVimxano2qsOd3jfIvCi3QQd3VleWHRWA1nNnr9SjC/VF9f7dWdxaJVMfsFo7DQ1Jq3nVW/Nb/WqiKdz11fXmqt9mfcOnN3OpViq+N51+s7vWrHCcmGORdmp53i9nE6SGXValQu+7NrHFXWZ/Km23ZYeH3uurFS99avOeRq7grur+xsWtUT2SqFUsl8o3SQ4kjTQQr6RUkHyQta/1fpIPmRp4OUcaldMJGp6cVCvp0rIEMrlrVcW7ftdqls5UaXDlKdVauH6SDNUaeDYK1tG6W8oZn/YTrIr578e0vtlZdZc3YjDgdudbBlre5coak0zzf/K0td0N+/dJCLRvAY19yIIviLS0i4W8im2ww4yThtE4cDpdMpjQMg+YDZACDJpssw6mGgJJ3lsIGRhFHoIVA36LTPKEWZm+BHShg1iZe+AUoSy+YSFuII3G3p3KsDiM5r7vUCM7LB/MsYZQEQmP8zr7si3AVGspwrH0fIg6DtDXLTwEGSW38E7rYMUscDF0nCCPlACAbaRqBpdBsgwUAbeNpvmdEKTCC9SdgfWGD55S5kROMOxG1SaYpZFyCBbTujFwkxrQxRO8LQa0sg7cDYiFSOHAhGYNh/FMEIhCMySg0gJJ3zF4chw1bKiXgeMAIvG/JrzyGXPYZeW8KoDbNrMkSQfiRVNJfA8BrYNJgVeeuMqqBmsnl+NrJ9aC/yUH+6IB7CEBmmD4DQ6YQC0TbokqRTIkBItn4NkdHtHHqRI9ouRP0w4A9+9ttmVAFAskRI8QpZx3LDBoROJxRHESCCkSNwsN+y40hhVx/Qs7P/v0MEy2ikf84TRx5AAlWDIA12PQTH6J03aTSCpeqwTc0IzL6LKMyogd0fQWLfBxEMrIFlO/PQI0YQiMBsESwPOQdKdVEbMJIsfLCAkCyPNhZvYddMCEdg18zzYBSQjgvdNoRsMFQL+2a+E0mQsHr2DSayARHM9YNVg90g34V4DaOIQ58NffYItoSCWARGayEWOY+tfCASAeforDuvYY8EMMAmxRQRHNjgIIG+ndU9ojB+JNc2UT9sBw2B7Uj+BRpjiEag24ZoBJLY/+dSNKAxrBGVQbodE+ix5avWIwQeEtg1SK6BhTXvBKV//fR3MDoiXVlz1GBQOZgdOWs8AoReQ0jORGGchsoJKjdqjaszt8bGvgWziYit \ No newline at end of file diff --git a/docs/cassettes/streaming-tokens_e977406d-7be6-4c9f-9185-5e5551f848f3.msgpack.zlib b/docs/cassettes/streaming-tokens_e977406d-7be6-4c9f-9185-5e5551f848f3.msgpack.zlib new file mode 100644 index 0000000000..0c05be40ab --- /dev/null +++ 
b/docs/cassettes/streaming-tokens_e977406d-7be6-4c9f-9185-5e5551f848f3.msgpack.zlib @@ -0,0 +1 @@ +eNrtnc1T3MgVwHcrN065JFW5KaqcUmiQ5nuGohK+soY1DJhvfxTVklqjBkktpNYwg8uHOLmntJV/IGsvZCmvd7e8lXg32ZxzyD/AHvZ/2PwFeRoGA2XHTTxAMH4cGBi1Wq1fv37vdffT08O9Fo1ixoP3n7BA0IhYAv6JP3q4F9GthMbi97s+FS63H881FhYfJRE7+KUrRBjXh4ZIyHI8pAFhOYv7Qy1jyHKJGIK/Q492q3lscrtzENxXfRrHpEljta7cua9G3KPwl5rENFIHFdXicOlAZF+tRExQhSgbfBM+TJ4IxSIiVh/cg3I+t6mXlWqGQityzWcBy86PRUSJDwdElNAHey4lNtzUHx67PBbp09PN/JxYFoXTaWBxmwXN9LPmDgsHFZs6HhF0H9oS0C6EdH+T0lAjHmvR3cOz0i9IGHoMWgTHhzZiHjzptV0TnZC+fHg/u0UN7jwQ6dejcSewGtCS0amhuQ5ADRQjV6rlKl+0tVgQFnhASfMINGo37B7/28kDIbE2oSat12Hp7uHJT0+W4XH6yQyxGgunqiSR5aafkMgvF5+d/D5KAsF8mu6Nz718ud7B48sVcoaRq315quLsjtLPuh/17m/G/3qqEiqijmZxqCv9k/70CJZHg6Zw00eGof85onEIokJ/twuniSR++Bg6hv7rn3s9mfm48eFRj37/3k8fT0Anpd+uUHtQydeUaRIoeT1fUoxavVCql4rKBzOLT8Z7l1nM+uRAEbQthmgr++ZQUoYVENQopmIkEY5W/XIxIkHsQEdNHgnFnuUmwSa198dfKQ7fZuIAt5fdDwizRtshj6nWa2b6ZFW7dTh6tKmJZ4eyp/GoSQK205WN9NOsl6ERLPiqdziMeFYlXFzz4/RRvmo87R056oB9uFFdM3RNN75paxFw8JjPAGb3d2+4Qv8XdPh5/nIJAUMqiNNPjZJ++POPk2Ui6kNrsssf15Svwc/fX13qRW3FrFCtUv7mdDmge1zTo7wfP3/5eK+Oj/X4SfuosMbs9OAX8M86LRRMs1S1C6V82agUi9V83nKccq1QdgzbtsyvoTOZBbVkvRfyCHqXWqCgRCc9GPRJOxt2IwWjVCjDvQ4rLLC8xKYLiTnBs5uIh5Uwoh4n9ufjv9HGieVSbaErgenexNrs6MzU+P4CNHKc801GP/ru/R+tr1vOuumPmGKMLPrLo9Nu4+aNpVV7tT2+VVtanJ+3F0JRs5NyMLXV3loudlZnNKNSqBqVWqFU1IycnjNyhrY0b/Jmsjy/Zt6u5efXSWVH50s7SVKJt2fL1ZtL7cVqUPZbca1RWd6ZKnbak3PTt0uG7wT5CeoXrbGx2+2txdVybXLGNLxqu1kb23Tn4W6IcEeGhhUQRgZ8R3pjRIMxosEIyet1/WiEDCt2l8FI7rRyHFZugG5vBF5nWFnIYFL4JD5dAKU8MssDevBHYJC0mD2ythRuTC1Pbmzs2DvLs3S0mA+8lblpcyyaW6Mrq4252dKNuU576sMbaycgFPSSpvc4lPVitSuHx01/w1b9ZVU7OeS1Rti1PulewOOAOc7uAo1gFKX7lscTG/R8RHehz2+NrqVf1fSaSUzdMInhGNWarU2u3NojHghTy0qfuYURtV4sFtRhxScj1XJR17s27be7mfAFze9+9mubCFJX7qvMVutqZgAtMH/aaKtaDcbntqcnVmZWtttzSzdbrery9E1ScmuJOqhycwPUSu+M3LHJzHUVDxSwQFEJCnW+gFcYPDKAJ+2fBlqhqOkVzajCWWBtWsyi64KBZa2rYNRI4onsQCcW1F93oM00CqHp2bWdcL2Sp3aFmKWilV3T5XAyGGmw0SywaVut64NQiSeIWj8y2yoBLQWqOsiqfWG5Vfgnok4SE2hfkHjeg0HV4
03QamZ8+MWgChdnsbsONwaWsVfq3oOBgXeH4DGuFbejIqLXIlK2SYyMJIyES5GRhBH448hIwihmQkBVyEnCCQggIlRJfaskaHAi4BoI6rWgfnU3uIsjTgJpjFokiXHQyQYdQzdA7nIHWeMQk8TEcUQkQZQtliIkCSSCtk2GiHbQsOGkBCcll8DI5+hFSin9HAG9ApCciRoLDg7BMZU7E43ZyXsDA+cZaSDeINIgdnkklJBTv894g+/f+zFGHFyxiINdq7t7mx78+4pv3l7Atuoroi3K/1u0xU/elWgLo5Z/u6ItiucebVElVWLYxQIxK5VSpVItOXaF1px83imVino5f37RFpajWd1oi/h0tMU57OKXSbVkv8Eu/vMf/rul5ltld9rhHzhjN5uJV9hYmm2Mlactvfpmlrp4EZbatKuFQr7mlAtXcRf/uhE8xjV1ThOv60sI3Cti8+0YOck4eZRuIqXXUxpEQPJ1jg5Cko017ggPMUllCVpnIyVUSf2JkXIXvSRZNCgTLjKS7U27LA5pRFElyUg1I2JRpCTdfEVJku6++jQiHnKSu93oK6GvhL7SJTBadNG0yXfyOcqRBJLHmq5ASBJJskmQFURMlxOBdb1XJlmElCSUnAQZoReJXuRFMxpFmyZ1IpkdoI90lu0kCu0TKFBSToJEKE2X9WzRtRYk9CRlkHIISO4loZ90htU2DJSQ6yOw/xbu4OKOW/+McLRJCJFtVNq4QtKvJkJCEkIRbRIPKUnTZuECCaojXLC9eEZzCEjmF+EKpNTso66+tEQZ19o3IjbO09Dq9ylGLiURbhudISTCw4mINADJQUQSRLiZjVob52r4ONsVkaPQIx0wbchJxgkJvZ4QTzBEGw0bpo64FEjQ6hgXSHAR6RyerOnmkERKqLVxOnLBsf4ISJZag0f4kD+usZ2HIAVZTlbkJHsVDSptaeqRFo1wPiJN9eOhpy0LjEAvG58VwWdFLmHZHxf9JYRYjCl+JYjQezzD8to2w8eN5IPtvF5njMtr7/AspINba/KwWhxosmRjfBsRoS7CpX58Icv/fQeb7+CymhxSFOBM5AxbIpiHRRrlz5o4VcPMEP0zQh8SfUj0ITFc5ApsOsIn7jti5Hq/hJII3Ue0aJjK5xJed4iQpG86MvGtWaiP0MPGVVp8W8Zb42FHNKYBam3U2n2PN+ajFEkzscBh3BORxvZjTp+zJD9ArY1h2RiWfQmUGrj9iO5Rv5afeixAhS3FFDGKLhKOt74FyeSYQUv+sDHDVCwYWXMugoTZD1Bn48I/Lvxj2oO3KHU2UkKV3efL1niCjGRhx5S00M0+gyThdq08eSY62qi10dHGGPar8Oa+rMGISZpfjGB8DepsjNK+cEKbAd/GlDVnedk6iVAhoUJCJxJXa/FFB2/H2zI9TMGKycXPIQECLkSi4cc1/8tQ2ayJxl8eNoJBWvKQSI/gg2wY6d/3dA0JvYKQnIkaCx6qJ6jcmWjMTt4bGPgPVCH+Og== \ No newline at end of file diff --git a/docs/cassettes/streaming-tokens_fdeee9d9-2625-403a-9253-418a0feeed77.msgpack.zlib b/docs/cassettes/streaming-tokens_fdeee9d9-2625-403a-9253-418a0feeed77.msgpack.zlib new file mode 100644 index 0000000000..fd77399049 --- /dev/null +++ b/docs/cassettes/streaming-tokens_fdeee9d9-2625-403a-9253-418a0feeed77.msgpack.zlib @@ -0,0 +1 @@ 
+eNrtnU1z28YZgNurTr300BuK6akjUAC/KVfT0Vdi2ZEoWd/+GM1isSBXArDQ7oIi5fFM6/YPoLceGztSo3GcZOxpk7TpuYf+AfWQQ39Cf0EXFGXJY1ermhTTKK8OEkWAi8XD92vfd3fx+LBFuKAs+uEzGknCEZbqH/G7x4ec7CZEyN8ehEQ2mfd0sb688iTh9PjnTSljMT42hmKaYzGJEM1hFo61nDHcRHJMvY4D0m3mqcu8znH00AyJEKhBhDlu3HtochYQ9cpMBOHmqGFipi4dyeytdU4lMZCxzXbUH5cl0sBICvPRA3VeyDwSZGc1YmkVmRXSiGafF5ITFKoDkifk0WGTIE/d1Dc/+NHTJhMyff56Rz9FGBPVAIkw82jUSD9p7NN41PCIHyBJjlRvItLFkB7tEBJbKKAtcnDyqfQzFMcBVX1Sx8e2BYue9XpvyU5M3jx8lN2kpe49kumXk6IT4brqyeTc2GJHYY0MJ1eq5SqftS0hEY0CxckKkOrUQdw9/pfzB2KEd1RLVu8rSw9OPvz8/DlMpB/NI1xffq1JxHEz/QjxsFx8cf59nkSShiQ9nF5883K9g2eXK+QcJ1f7/LWGsztKP+n+Ge/+puzPrzVCJO9YmKm20j/YB5ixHUrS439vbWF/yw0nXDmFVsK1yVvN+gc3Vze8jfb0bm11ZWnJW45lzUvK0dxue3et2NmYt5xKoepUaoVS0XJyds7JOdbqkssaydrSpnu3ll/aQpV9m63uJ0lF7C2Uqx+stleqUTlsiVq9srY/V+y0Zxdv3S05oR/lZ0hYxFNTd9u7Kxvl2uy86wTVdqM2tdNcumGo3iUt6k1srsbbc2uz29v73v7aApks5qNgffGWO8UXN8n6Rn1xoXRzsdOeu31z81z3CnbJsns9LNvFqp39PD8VlIBEDdlMnziO/UdORKwUhfzmQCGTiXj8VAkl+cffD3sa82H99pk8//jpjBLQ9Ot14o0a+ZpxC0VG3s6XDKc2XiiNlyrG+/Mrz6Z7l1nJ5PHYkKQtx0gre+dET24YSk25IHIikb5V/XyFo0j4SkhnTxXiEDeTaId4R9NvVYWvM1VQX212P0qVLdKOmSBWr5vpsw3rzontsOZmXpzoncV4A0V0v6sX6ceZhKtO0Ohl73DMWdakurgVivRJ3q487x05Fb4jdaO25diW7XzVtrjiENCQKpjd3z1jpWS/kKH+4s0zpDIokUg/dkr2yc/fzp/DSah6k13+rKV8Tf389e1nvWqtmJ1Uq5S/ev08RfespSf5UHzx5vFeGx/a4ln79GSLeunxz9Q/W9ghdgEX/CKyq65fqOVtXPZ9x8FuGZF8zflSfZkUq1ayby9mXH27BCvzLDvp8WiI2pnJmSg4pUJZ3esNg0Y4SDyynLgzLLsJccOIOQkY8j6dfs+aRrhJrOWuBKaHM5sLk/Nz03/asM6LklWPuzY9PYyYiKjvHywTrr6d9AgHLPGU7eTkQLV1Z3IzfVmzay5ynUreJ1WnWvOs2fU7hyhQnWzh9EWzMGGOF4sF84YRoolquWjbXU/x64PspqLGP3/yCw9JNG48NKlnjpuZW8HKqViTrWqVo6XNu5Xbso5X3N3WDIpvLk23BasE5qjJ3G0lrr1P5M4cUa4r0OoErBRAEtXmK12tjJ66lfNexVLSVrTsiuVU1aeUBW9RTLYkVf5q3FSOAiWBzA50hCThlq/6THisup5d24+3KnniVZBbKuLsmk2mPqxcn/J8NPJI2xy3R1UjgUTm+KkzNJGSfmUCoqzZV/7QVP9w4icCqf5FSRA8GjUD1lDa4oqTN0ZNdXEqmlvqxpS36Z314NHIyPeH4Bmu9WbHBEQXIjI8dbfA6GJGskmAkYaRinGBkYaRoMBIx0gRAERgjvo2R6rDiVTXAFAXgvrl/eg+aJwG0hTBKBGgdDqlA/emRbSnhnQEIm6ti2OASIMoS8ABJA0kBL5Nh4h0wLHBoAQGJUNgFDKIIrWUfgq
A3gJIz8QUkqmA4IzKvZn6wuyDkZFB1u7lO9TuRZNxacSMhFDBhwr+9a7glwdbwa9elwp+oWB/tyr4xYFX8L08QrWSly+WKw4uVorYccuoZJMCKmMb19DgKvjYt3C3gi8GXsHHBa/svUMF/2Xjv3tqsbaAW+2Zze3aQmV+j9lxh5duVjft+N08dfX7V8G/bgTPcM0NaOB1fQmp8Ap5bE8AJx2ngJAdoHQxpVEANKRU0HXWNBpkrwDTxZhiBFYb7FG/UmTcB4OkmwZKZRMYaeSoRYIWAaOtw9TgCBOgBFa772pZByDpIKEdULVLSBLlQEk7O31gldfrSykHgCDY7n/NFRC6mBAVkIbUICIcMiNax79HJQZl085JB4utn5YGSW0dpIBCbkSbGwnYHkCC1AjE2FfMaBIA6Vyaag4gacYhGAjpNsdAEWT7waX1vQoNRrM6RC4noGgaSPsE5EgnRz6MQSDPP4gxCIxCdJRmO2CPtJIUUKhh6yFlm2JwUDgNJ9A2GIn0HWbTRhMyI/ptQzxgpNvJiMKMGjBIkO2/ckZ3iB9k3QdMQ9qB5jovzSKqfxICSe0qPx8Q6Ups1IsgmLyMzu1QSJRAThIipStntAKrai6xFCLhAUCCtO0AMgBNKmKYqg1ZgP5DJOZDHAlJyUFsr85DgATmCEJtmIL8bW/vA4QuJpTwbGtIoKSRoyZBXLoEQYAEfq3fCBIIafeJCEIwSpfg1IRAG3LaMM92GJSmE9jfR7c6m0BqBDZC6D8NmQAjDSMXyiIaQiiEggiM1GB/yOFUjQLApF3E3n2WFlACkwRFkSseqcF2bLrgSEB27RKlI5AjnRzB80V0iBiMQyAnAjWjIVjrAHX8BObUaodrEgYiUDSCgQg8NvP/QYpIi3DIHem39IUdECF5BFHkMB52FICe6SAlkc8CD8a1YI8ghrxqRnXYAkG/IgtTeNLxJVbTMg6+DWw2rH8czmIRiI9ge5/+R/4q0AZIkKuFCf5XH2lDuR+Co34XZGW9SzhsNAa+H3bSHAYj2PvwEpT+9avfg0HSViJDlyMoskEMAEntq386PZUQbGuH/uovFCMvsfwoCcC7QdZ2EI/SgFyb3ipBkARBEgRJMHsUZo9+Z6qRLASLDRYb9kIYQghJUAt07RKiRGEzLe2QjUNiG6w2xNlXzeg9xrM4EjBpp4+2YH8/sEgQRw5lkTZBsJQdJkgMaCoyUIIZpP87ID0TU0gWm+eo3JupL8w+GBn5DxonyBs= \ No newline at end of file diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index e668c85a99..c2828ef5b5 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -42,7 +42,7 @@ "\n", "!!! note \"Using without LangChain\"\n", "\n", - " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. 
Check out the [example below](#using-without-langchain) to learn more.\n", + " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", "\n", "!!! warning \"Note on Python < 3.11\"\n", " \n", @@ -67,8 +67,8 @@ "metadata": {}, "outputs": [], "source": [ - "# %%capture --no-stderr\n", - "# %pip install --quiet -U langgraph langchain_openai" + "%%capture --no-stderr\n", + "%pip install --quiet -U langgraph langchain_openai" ] }, { @@ -81,7 +81,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "id": "a372be6f", "metadata": {}, "outputs": [ @@ -149,7 +149,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "id": "7cc5905f-df82-4b31-84ad-2054f463aee8", "metadata": {}, "outputs": [], @@ -160,8 +160,8 @@ "\n", "\n", "# Note: we're adding the tags here to be able to filter the model outputs down the line\n", - "joke_model = ChatOpenAI(model=\"gpt-4o-mini\").with_config(tags=[\"joke\"])\n", - "poem_model = ChatOpenAI(model=\"gpt-4o-mini\").with_config(tags=[\"poem\"])\n", + "joke_model = ChatOpenAI(model=\"gpt-4o-mini\", tags=[\"joke\"])\n", + "poem_model = ChatOpenAI(model=\"gpt-4o-mini\", tags=[\"poem\"])\n", "\n", "\n", "class State(TypedDict):\n", @@ -195,7 +195,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "id": "96050fba", "metadata": {}, "outputs": [ @@ -204,25 +204,25 @@ "output_type": "stream", "text": [ "Writing joke...\n", - "Why| did| the| cat| sit| on| the| computer|?\n", + "Why| was| the| cat| sitting| on| the| computer|?\n", "\n", "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!|\n", "\n", "Writing poem...\n", - "In| sun|lit| patches|,| they| softly| tread|,| \n", - "|Wh|isk|ers| twitch|ing|,| with| grace| they| spread|.| \n", - "|With| eyes| 
like| lantern|s|,| glowing| bright|,| \n", - "|They| dance| through| shadows|,| a| silent| flight|.| \n", + "In| sun|lit| patches|,| sleek| and| sly|,| \n", + "|Wh|isk|ers| twitch| as| shadows| fly|.| \n", + "|With| velvet| paws| and| eyes| so| bright|,| \n", + "|They| dance| through| dreams|,| both| day| and| night|.| \n", "\n", - "|P|aws| like| whispers| on| the| floor|,| \n", - "|Cur|led| up| tight|,| they| dream| and| sn|ore|.| \n", - "|Ch|asing| ph|ant|oms| in| the| night|,| \n", - "|F|eline| secrets|,| hidden| from| sight|.| \n", + "|A| playful| p|ounce|,| a| gentle| p|urr|,| \n", + "|In| every| leap|,| a| soft| allure|.| \n", + "|Cur|led| in| warmth|,| a| silent| grace|,| \n", + "|Each| furry| friend|,| a| warm| embrace|.| \n", "\n", - "|A| gentle| p|urr|,| a| playful| sw|at|,| \n", - "|In| every| corner|,| a| cozy| spot|.| \n", - "|Maj|estic| hunters|,| soft| as| a| sigh|,| \n", - "|In| the| hearts| of| many|,| forever| they| lie|.| |" + "|Myst|ery| wrapped| in| fur| and| charm|,| \n", + "|A| soothing| presence|,| a| gentle| balm|.| \n", + "|In| their| gaze|,| the| world| slows| down|,| \n", + "|For| in| their| realm|,| we're| all| ren|own|.|" ] } ], @@ -238,7 +238,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "id": "bcdf561d-a5cd-4197-9c65-9ab8af85941f", "metadata": {}, "outputs": [ @@ -249,15 +249,16 @@ " 'langgraph_node': 'call_model',\n", " 'langgraph_triggers': ['start:call_model'],\n", " 'langgraph_path': ('__pregel_pull', 'call_model'),\n", - " 'langgraph_checkpoint_ns': 'call_model:eeaca45a-85f2-c80f-e985-704a168a5d8c',\n", - " 'checkpoint_ns': 'call_model:eeaca45a-85f2-c80f-e985-704a168a5d8c',\n", + " 'langgraph_checkpoint_ns': 'call_model:6ddc5f0f-1dd0-325d-3014-f949286ce595',\n", + " 'checkpoint_ns': 'call_model:6ddc5f0f-1dd0-325d-3014-f949286ce595',\n", " 'ls_provider': 'openai',\n", " 'ls_model_name': 'gpt-4o-mini',\n", " 'ls_model_type': 'chat',\n", - " 'ls_temperature': 0.7}" + " 'ls_temperature': 0.7,\n", + " 
'tags': ['poem']}" ] }, - "execution_count": 6, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -279,13 +280,13 @@ "id": "a3a72acd-98cc-43f6-9dbb-0e97d03d211b", "metadata": {}, "source": [ - "You can see that we're streaming tokens from all of the LLM invocations. Let's now filter the streamed tokens to include only a specific LLM invocation. We will use `.astream_events()` method for this, and filter events using the tags we've added to the LLMs previously:" + "You can see that we're streaming tokens from all of the LLM invocations. Let's now filter the streamed tokens to include only a specific LLM invocation. We can use the streamed metadata and filter events using the tags we've added to the LLMs previously:" ] }, { "cell_type": "code", - "execution_count": 7, - "id": "17354369-32d9-4413-8b4b-2ae9786948f7", + "execution_count": 6, + "id": "c9e0df34-6020-445e-8ecd-ca4239e9b22b", "metadata": {}, "outputs": [ { @@ -293,27 +294,22 @@ "output_type": "stream", "text": [ "Writing joke...\n", - "Why| did| the| cat| sit| on| the| computer|?\n", + "Why| was| the| cat| sitting| on| the| computer|?\n", "\n", - "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!| 🐱|💻|\n", + "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!|\n", "\n", "Writing poem...\n" ] } ], "source": [ - "# highlight-next-line\n", - "async for event in graph.astream_events(\n", + "async for msg, metadata in graph.astream(\n", " {\"topic\": \"cats\"},\n", - " # highlight-next-line\n", - " version=\"v2\",\n", + " stream_mode=\"messages\",\n", "):\n", - " # filter on the custom tag\n", " # highlight-next-line\n", - " if event[\"event\"] == \"on_chat_model_stream\" and \"joke\" in event.get(\"tags\", []):\n", - " data = event[\"data\"]\n", - " if data[\"chunk\"].content:\n", - " print(data[\"chunk\"].content, end=\"|\", flush=True)" + " if msg.content and \"joke\" in metadata.get(\"tags\", []):\n", + " print(msg.content, end=\"|\", flush=True)" ] }, { @@ -326,7 
+322,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "id": "699b3bab-9da7-4f2a-8006-93289350d89d", "metadata": {}, "outputs": [], @@ -392,7 +388,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "id": "e977406d-7be6-4c9f-9185-5e5551f848f3", "metadata": {}, "outputs": [ @@ -401,25 +397,25 @@ "output_type": "stream", "text": [ "Writing joke...\n", - "Why| did| the| cat| sit| on| the| computer|?\n", + "Why| was| the| cat| sitting| on| the| computer|?\n", "\n", - "|Because| it| wanted| to| keep| an| eye| on| the| mouse|!|\n", + "|Because| it| wanted| to| keep| an| eye| on| the|\n", "\n", "Writing poem...\n", - "In| shadows| soft|,| on| silent| paws|,| \n", - "|A| whisk|ered| muse| with| gentle| claws|,| \n", - "|They| weave| through| dreams| in| moon|lit| grace|,| \n", - "|A| dance| of| warmth| in| a| sun|lit| place|.| \n", + " mouse|!|In| sun|lit| patches|,| they| stretch| and| y|awn|,| \n", + "|With| whispered| paws| at| the| break| of| dawn|.| \n", + "|Wh|isk|ers| twitch| in| the| morning| light|,| \n", + "|Sil|ken| shadows|,| a| graceful| sight|.| \n", "\n", - "|With| eyes| like| stars|,| they| peer| so| wise|,| \n", - "|The| world| reflected| in| their| guise|.| \n", - "|From| playful| leaps| to| cozy| curls|,| \n", - "|In| each| sweet| p|urr|,| a| magic| sw|irls|.| \n", + "|The| gentle| p|urr|s|,| a| soothing| song|,| \n", + "|In| a| world| of| comfort|,| where| they| belong|.| \n", + "|M|yster|ious| hearts| wrapped| in| soft|est| fur|,| \n", + "|F|eline| whispers| in| every| p|urr|.| \n", "\n", - "|Oh|,| feline| friends|,| with| hearts| so| bold|,| \n", - "|In| every| tale|,| your| love| unfolds|.| \n", - "|A| quiet| comfort|,| a| steadfast| glance|,| \n", - "|In| the| company| of| cats|,| we| find| our| trance|.|" + "|Ch|asing| dreams| on| a| moon|lit| chase|,| \n", + "|With| a| flick| of| a| tail|,| they| glide| with| grace|.| \n", + "|Oh|,| playful| spirits| of| whisk|ered| cheer|,| \n", + "|In| 
your| quiet| company|,| the| world| feels| near|.| |" ] } ], @@ -434,7 +430,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "id": "0bdc1635-f424-4a5f-95db-e993bb16adb2", "metadata": {}, "outputs": [ @@ -445,11 +441,11 @@ " 'langgraph_node': 'call_model',\n", " 'langgraph_triggers': ['start:call_model'],\n", " 'langgraph_path': ('__pregel_pull', 'call_model'),\n", - " 'langgraph_checkpoint_ns': 'call_model:ca83e792-dddc-7f99-c8ff-4e8c166106f6',\n", + " 'langgraph_checkpoint_ns': 'call_model:3fa3fbe1-39d8-5209-dd77-0da38d4cc1c9',\n", " 'tags': ['poem']}" ] }, - "execution_count": 10, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -468,7 +464,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 10, "id": "fdeee9d9-2625-403a-9253-418a0feeed77", "metadata": {}, "outputs": [ @@ -480,29 +476,29 @@ "\n", "\n", "Writing poem...\n", - "In| shadows| sleek|,| with| eyes| ag|low|,| \n", - "|A| whisper| of| grace|,| as| they| softly| flow|,| \n", - "|With| p|itter|-p|atter| on| the| midnight| floor|,| \n", - "|Cur|iosity| blooms|,| they| explore| more| and| more|.| \n", + "In| shadows| soft|,| they| weave| and| play|,| \n", + "|With| whispered| paws|,| they| greet| the| day|.| \n", + "|Eyes| like| lantern|s|,| bright| and| keen|,| \n", + "|Guard|ians| of| secrets|,| unseen|,| serene|.| \n", "\n", - "|A| stretch| and| a| y|awn|,| in| sun|beam|'s| embrace|,| \n", - "|Ch|asing| the| dust| mot|es| that| dance| with| such| grace|,| \n", - "|Each| p|ounce| a| ballet|,| each| leap| a| delight|,| \n", - "|The| world| is| their| playground| from| morning| to| night|.| \n", + "|They| twist| and| stretch| in| sun|lit| beams|,| \n", + "|Ch|asing| the| echoes| of| half|-|formed| dreams|.| \n", + "|With| p|urring| songs| that| soothe| the| night|,| \n", + "|F|eline| spirits|,| pure| delight|.| \n", "\n", - "|F|urred| confid|ants|,| both| sly| and| serene|,| \n", - "|With| silent| mis|chief|,| they| dwell| 
in| between|,| \n", - "|In| the| heart| of| our| homes|,| they| fro|lic| and| play|,| \n", - "|Oh|,| marvelous| creatures|,| in| every| way|.|" + "|On| windows|ills|,| they| perch| and| stare|,| \n", + "|Ad|vent|urers| bold| with| a| graceful| flair|.| \n", + "|In| every| leap| and| playful| bound|,| \n", + "|The| magic| of| cats|—|where| love| is| found|.|" ] } ], "source": [ "async for msg, metadata in graph.astream(\n", " {\"topic\": \"cats\"},\n", - " # highlight-next-line\n", " stream_mode=\"custom\",\n", "):\n", + " # highlight-next-line\n", " if \"poem\" in metadata.get(\"tags\", []):\n", " print(msg[\"content\"], end=\"|\", flush=True)" ] diff --git a/docs/docs/how-tos/streaming/streaming.ipynb b/docs/docs/how-tos/streaming/streaming.ipynb index 4327f47dda..6c577fc580 100644 --- a/docs/docs/how-tos/streaming/streaming.ipynb +++ b/docs/docs/how-tos/streaming/streaming.ipynb @@ -25,10 +25,10 @@ "LangGraph is built with first class support for streaming. There are several different ways to stream back outputs from a graph run:\n", "\n", "- `\"values\"`: Emit all values in the state after each step.\n", - "- `\"updates\"`: Emit only the node name(s) and updates returned by the node(s) after each step.\n", + "- `\"updates\"`: Emit only the node or task names and updates returned by the nodes or tasks after each step.\n", " If multiple updates are made in the same step (e.g. 
multiple nodes are run) then those updates are emitted separately.\n", - "- `\"custom\"`: Emit custom data using from inside graph nodes via `writer: StreamWriter` kwarg of each node.\n", - "- `\"messages\"`: Emit LLM messages token-by-token together with metadata for the graph node where LLM is invoked.\n", + "- `\"custom\"`: Emit custom data using from inside nodes or tasks using `StreamWriter`.\n", + "- `\"messages\"`: Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks.\n", "- `\"debug\"`: Emit debug events with as much information as possible for each step.\n", "\n", "You can stream outputs from the graph by using `.stream()` / `.astream()` methods:\n", diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 584f15b447..c3ff981ae5 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -59,10 +59,11 @@ plugins: - redirects: redirect_maps: # lib redirects - 'how-tos/stream-values.md': 'how-tos/streaming/streaming.md' - 'how-tos/stream-updates.md': 'how-tos/streaming/streaming.md' - 'how-tos/streaming-content.md': 'how-tos/streaming/streaming.md' - 'how-tos/stream-multiple.md': 'how-tos/streaming/streaming.md' + 'how-tos/stream-values.md': 'how-tos/streaming/streaming.md#stream_modevalues' + 'how-tos/stream-updates.md': 'how-tos/streaming/streaming.md#stream_modeupdates' + 'how-tos/streaming-content.md': 'how-tos/streaming/streaming.md#stream_modecustom' + 'how-tos/stream-multiple.md': 'how-tos/streaming/streaming.md#multiple-streaming-modes' + 'how-tos/streaming-tokens-without-langchain.md': 'how-tos/streaming-tokens.md#example-without-langchain' # cloud redirects 'cloud/index.md': 'concepts/index.md#langgraph-platform' 'cloud/how-tos/index.md': 'how-tos/index.md#langgraph-platform' From ababe1aafc239301f3eb2da0a446895d9ab27da5 Mon Sep 17 00:00:00 2001 From: vbarda Date: Wed, 29 Jan 2025 15:09:11 -0500 Subject: [PATCH 04/14] move --- docs/docs/how-tos/index.md | 2 +- docs/docs/how-tos/streaming-tokens.ipynb | 6 
+++--- docs/docs/how-tos/{streaming => }/streaming.ipynb | 0 docs/docs/how-tos/{streaming => }/streaming.md | 0 docs/mkdocs.yml | 10 +++++----- 5 files changed, 9 insertions(+), 9 deletions(-) rename docs/docs/how-tos/{streaming => }/streaming.ipynb (100%) rename docs/docs/how-tos/{streaming => }/streaming.md (100%) diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 9f649ccbbc..217fcb07da 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -81,7 +81,7 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( [Streaming](../concepts/streaming.md) is crucial for enhancing the responsiveness of applications built on LLMs. By displaying output progressively, even before a complete response is ready, streaming significantly improves user experience (UX), particularly when dealing with the latency of LLMs. -- [How to stream graph outputs](streaming/streaming.ipynb) +- [How to stream graph outputs](streaming.ipynb) - [How to stream LLM tokens](streaming-tokens.ipynb) - [How to stream events from within a tool](streaming-events-from-within-tools.ipynb) - [How to stream events from within a tool without LangChain models](streaming-events-from-within-tools-without-langchain.ipynb) diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index c2828ef5b5..7a06730bce 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -42,7 +42,7 @@ "\n", "!!! note \"Using without LangChain\"\n", "\n", - " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. 
Check out the [example below](#example-without-langchain) to learn more.\n", + " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", "\n", "!!! warning \"Note on Python < 3.11\"\n", " \n", @@ -86,7 +86,7 @@ "metadata": {}, "outputs": [ { - "name": "stdin", + "name": "stdout", "output_type": "stream", "text": [ "OPENAI_API_KEY: ········\n" @@ -383,7 +383,7 @@ "source": [ "!!! note \"stream_mode=\"custom\"\"\n", "\n", - " When streaming LLM tokens without LangChain, we recommend using [`stream_mode=\"custom\"`](../streaming/streaming/#stream-modecustom). This allows you to explicitly control which data from the LLM provider APIs to include in LangGraph streamed outputs, including any additional metadata." + " When streaming LLM tokens without LangChain, we recommend using [`stream_mode=\"custom\"`](../streaming/#stream-modecustom). This allows you to explicitly control which data from the LLM provider APIs to include in LangGraph streamed outputs, including any additional metadata." 
] }, { diff --git a/docs/docs/how-tos/streaming/streaming.ipynb b/docs/docs/how-tos/streaming.ipynb similarity index 100% rename from docs/docs/how-tos/streaming/streaming.ipynb rename to docs/docs/how-tos/streaming.ipynb diff --git a/docs/docs/how-tos/streaming/streaming.md b/docs/docs/how-tos/streaming.md similarity index 100% rename from docs/docs/how-tos/streaming/streaming.md rename to docs/docs/how-tos/streaming.md diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index c3ff981ae5..932e6099cb 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -59,10 +59,10 @@ plugins: - redirects: redirect_maps: # lib redirects - 'how-tos/stream-values.md': 'how-tos/streaming/streaming.md#stream_modevalues' - 'how-tos/stream-updates.md': 'how-tos/streaming/streaming.md#stream_modeupdates' - 'how-tos/streaming-content.md': 'how-tos/streaming/streaming.md#stream_modecustom' - 'how-tos/stream-multiple.md': 'how-tos/streaming/streaming.md#multiple-streaming-modes' + 'how-tos/stream-values.md': 'how-tos/streaming.md#stream_modevalues' + 'how-tos/stream-updates.md': 'how-tos/streaming.md#stream_modeupdates' + 'how-tos/streaming-content.md': 'how-tos/streaming.md#stream_modecustom' + 'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple-streaming-modes' 'how-tos/streaming-tokens-without-langchain.md': 'how-tos/streaming-tokens.md#example-without-langchain' # cloud redirects 'cloud/index.md': 'concepts/index.md#langgraph-platform' @@ -145,7 +145,7 @@ nav: - how-tos/review-tool-calls-functional.ipynb - Streaming: - Streaming: how-tos#streaming - - how-tos/streaming/streaming.ipynb + - how-tos/streaming.ipynb - how-tos/streaming-tokens.ipynb - how-tos/streaming-events-from-within-tools.ipynb - how-tos/streaming-events-from-within-tools-without-langchain.ipynb From c265e99c3ae2f77f16df7754d8a8e60fd2fac698 Mon Sep 17 00:00:00 2001 From: vbarda Date: Wed, 29 Jan 2025 15:40:42 -0500 Subject: [PATCH 05/14] update subgraphs --- .../streaming-subgraphs_7.msgpack.zlib | 1 - 
docs/docs/how-tos/streaming-subgraphs.ipynb | 329 ++++-------------- 2 files changed, 75 insertions(+), 255 deletions(-) delete mode 100644 docs/cassettes/streaming-subgraphs_7.msgpack.zlib diff --git a/docs/cassettes/streaming-subgraphs_7.msgpack.zlib b/docs/cassettes/streaming-subgraphs_7.msgpack.zlib deleted file mode 100644 index 6756b11d43..0000000000 --- a/docs/cassettes/streaming-subgraphs_7.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ -eNrMuwVc02vfPz4bBYMOBaTLEaP7UIONkpEbJbAB2xgdUnqUGiVudEs3KC2l0t2gpJJKKxISwn+ecx/Pfd/Peep+nuf1/13b2PW94hPvz/uK77Uvj/M8Ua5uaCfHM8VoR3eUq5WNO+nCjfA4zxXl4oFycw/MxaHc7Z2QWepqBlkerujJ86f27u7ObjJCQjiUK84KjRREO2KF0Dg7Iai+kaSVsbgzUtXJDuWNxFmrS4vCQfb2No56jrpoZUkoSEfExhHhAPVxsoOC7jtb44zsbdBIaVsTYQeonrOjDQ7sZaWibAhTddNUMZK2tRGFkfrCbE287a3vaigJmIhI+yDVwd5IsLTtXRVpr7sOws66GCcJBM7B20ZUT1PFRMfBWt0IQ6p3swYhfTQ1/ulazwlqY2LkQJLrbA0SJ8k3crc2BntbmTjbI9UdPK0dpB0RxuIOJDuEEUYkXcYi7nATqLimOtKBZKs33ATmYCKqI2JtLEKyC+usYudwH2ls5PO3vn+0d0aR+lsZS3uQ2pL6wWyR6iRfQPft4SA3JxuQkQcCLC1sTfLFWh2MsfLGaqoYI+ytjO+L2OCMbOHG4vbWGg4+ViY6tgiQkbAJ6Gedzw87SL7g4MYObkgTqIPNf9zXA2ECJfll9NNPlL79X/tHwoWE7W+2/h3Wv1/7eHnaqIojNQ2cJHRx9+1/w/kfYqPkrmUgZkfyyw1hrEOy974nQlRbkoTNTztNQGAPuPF9cRuQg48J6C+xtNPSFxaAkOJKsg9rIiIl+S9g+1cyfBDG9x3gojBbEhcdbf5mKwIHdrbWMPImYWBvjQO7oUx0nG1ESLE3gdn+rDPR/nf7/ysc0vUicU/UCI0QhdrbqNv9xzLs/jWeQlSE3e+ilf+7uP3wk1T22/gRgeOQpNgoO0HU/4pX2v9lrv7Nlv8OB/60w1jHDW6i4wNTN8JB1GEOpJiIWGvokeLm4Gat6oRBYKA4bXUETkv9xxxi6G6lbuSMANkL62LUPLRJ2MFBP3iqo4Ywhtv9bW74va+KiKfNjzFnAhPXxSj9k57fuP1TBxyHQOvgEBgVO3l5RWs7FScHJ1d5Ni97tDsq29oJ6f06zx5lhSRNnYGFhm4oV6CSHcrRnVDt7E2aKx2Bf5s73YRAgqKkd5mSjQ3K2R2o5mjjhEQ72hFK7HzQznduI1G2DlbuqNzfqwlZ/EL8hSpOjo6o3yZhQiEWhXIGWjmgPVH5rig3Z9K8jArIdXO3cvdwe5xNEoXq7czDodzcrOxQmbqaf1gUVUqS4U4yB6iFcrRztyfkgCSlQKD6H1rc3IA/Kl2dHIBKDg5OXkBdV7Qd2pGQwZ+lo6ZFmJDzZXfz+K2dpe3f1gJ2GeE77K4oZydXd0t3J3YZdhtboCPKgf0OO87qviVJNbuMhLCYlLCwf7YqyRlCo4G9x53bIInb+ijn2yBhkNhtEVEZMREZcfHb6toGJVpWbu5AbRIMtmgUktCob+VOaizyD41FJGXEJX80/nfAKPkbnDArRzuUGyHH2tsd5VaiYmVjj/rDO0IZyTggyTh5kISk
lJiwcJkKGPh7C/3f8CNkaUAMiv8AysDbGUUoRONIHYQwzii7bCMrV29Cze/g3Ln9T+ErgP2GBtDAiTBx4MuOckQ6O5EWTzd2GVNfdg9XBxJGvy+RZkJmQlaCJLAEbRycPJCkYLuiBG2ccGZCv+NpJuQppugm74VCc4GU4WAPGBcIbIW1cidd+UioQ0hXdvoQWy/3u5KuqvqqIGNJR1EvZT1hlR8NxKRMcFgPB0cTLxcfKXUVMNgVoaQBwRjCPJAwKVK9qJq9obi0uKOWJkTPBSNib4V0lRRxxYnp60i7wLAeEiauOlYaEG8Va1cVZSV9XX0RtIaBEpeoKunN7m9+h93O1cnD+T+Kdq4+ypW0dyAU/ulbLglkmBKcUCllIy1qZWMlDLIVtRG2lRIDqhnDskhDhJArJioqJir12xB6RKKyKwnONofS03enHwA3oGAIGHDmDABwhvQCnM6feQdRUbG8C9MFQ7TUfq/oBJDSeWEAAEcKM0xd+bYJHHH70offe/xIVjZuzoB/P5Fa7Y3/3nYECPjvp8tIlJsN6fsz6cPpSlJOEklDytPY/Z7n/ZG3/j0v9yPv5e7sTsrf/ZF3NYCpkPJIUp7X7u/y1n+Xt3F2/dE+gpSXwzl42PxpN4AC5WioT/omI31uAdwAMIA6QPmH/783ccoEAKR2AYBzT/8ss04EAGqCAACGyT/LOJ8BANcDAYDqgT/L/sTE2crV6rei86TPWVtbAGA7DwC4CgcAqAcBgCumfwDx79h2+zfb1AFOpJcdwAGAIpVAAI4AG4AgKQcCCANEABKnUwAVwLmzZ3+8Sek86X2B7MKF8+cvXLl06SIZxRUKCvIr5ORXr1HduHqN8ho5+Q26G5TUNLS0tBTX6RnoaBioaGhpfgg5c47U5/yFyxcuXKa5Sn6V5r+dTl8DKMnOzJ/5fu4MO+As5ZlzlGdOWwHMJK6dAZw78+Pv39LFC+cvkZ07e+YyqRp+g3R9hqT76iWy81dJ5pMgOEty4SLlJSo2ahEjdzIaOnp2CSU9Ykxs+QgtyMDQamiYQxym71JROapq7fGIk1tUTFJZxdXtMSH6zWW1DK6AzC+BLz6TRDP9Te/fM/Y30aQ6OUoSyc+cu0hxgaTy9yakOkqq8xdoRfSGaNhAokr6Vi4EYsYbapjri2Gx5s8cytaP2Mvn5r+cTgIozp0hOXiOEqAIOAUYvP7PeP7AqLIyADGqAaHLzWEJVOHeXvnk+OJsY0x3FRGMy3i4OKnkVjd3QGUmRU/LSKCP6QlMuViHCV3rWw8zyi05ouKqDPjKlipJpbYof9XCg/HLTX1VVetSxyiDmarshrTUK2KFk1e0GkK4LpwCCMebhfFreJbyqIaH1KHDbJwxI8Y5XK/xmVnKp4APD31jcw7TLhVxbNKdRDll3n+yWvCg9tEa71FLXaFl6W2fboXPm0bxq/dZiqNqZf9oev3T3uEtdQ+DV2a/7Mb4z80YCr29IY9ive8loLkrd91IfZq5e/i77rkxwv6lHV//axx/iPqyF+5tKt3XOU6JFNpBePMrHlJGRblWvtJ/vYv0b1ngudyRe3v4Oy22AAT6PC/x8NsPPRyhlWfNu+EJhdfU2NOTr54HL8n2b60SBeUej7UJfOWCTsuLfRYXp/vuEehrotUT9lGAeXGLDcHIug5S4ET1Iv00Oju/hRmqDYUlNikmdYVwAi5qAP5H6Xw7Y07yviR2Rr98WZq2iq4uB7+mD9Fl5T6QeXUyF+YiWrGzV6L67spoapANMIcQl0Kznar+3dTlUtkkX/De8Ybe0NDM0KfEcYreeK6ilnyemRkdTHXhOegDrbwBVYnHH0WPwkPTzzJZq2CrDCWTxTqfWcB0O1eRqvquWP4p8W5/btSbbAIhqedRXLB6gTMD26XUPUPrd7N9R6W5qlO1pwCBtnx9/tKE5imXsZoGjHLie+D42yZEdqhdHVHeKBXtKtgpd5MqL7+ipiL5oniRjjztNf0+ycjq3lCs
KWdX7SRph540TARq9stwN0sMlm+E2lIyl3SVX+nGrhbUHrx/vYF9OcRAYD4gMwVDxAtuhnPyQbik82LadIZEyXtb7nRw1etXmsgqRhsdjpCzPOJmDqiGPHQBdk/PeMQdgGaV8gcDxDE59qq/yJqyTVWTh7d0oAZeOVVtcxt9V54XXk+bhcKCWDm12cBZQ632aR7Y1xG71AbfiaPGsjYWFNM3xdg8nuaBkwoLcrqlb48jg7LfsaM4HyttFXfWxGSnhSnj67Ks7wO/W+1JvbCLnFT2vRX5VO35Rm34MHmwavD+29wVqh7zNmxm+HLik64KglXgnh6klhBlbjK8aHJ8pXZP/K4xX8I2U8xBiVAS2QHnZlOQU5HqJ+hCgf25BlLotfTQhgnmYxyq7f7V6xNK2A8FbUAh+v2C800sxjR8ra2gjb4suFyqdq1Hb7veSksMYrwC+KCVeP3EmpnOLGnew3zr8pNYKcA19/8ZA8+6jnPsxK01PG5eEvuKxjRzqBWFK3tMe1SeAoortxiZvEdTxUYHY4q+eEfOmxpnWiX7TDbk5gi59lWJxEDQgtkxqRtjlATeUKiQNAbJF0ybRm9sB3S7fP1NyqtU/iblhdpXPIH2IK5TQC9YMqyglF3gbY3P/TzE3fqbEjqIlQo/k6qPwzlPFE849BnfVZ8JmqrBKgUNwSYkYrmoPhhX9hRw0YWxw2BgRPyZXjm2bXbOJfou9m3XDdZTAPdEghc1R47x+/3Eb8oPmNMHVriGqu6etaxNZOZvOgU0DDRT+eqUVre+8HQQ2E4OeFqE6LTrqevKi37gSyF90lu7KVOiMCPFKuwnq5g+rJmFrbrUbWZk1+YjFvf02Otp7jS6dYsLDRocRvotIn3seWRDCcNFW0e9x7c3udWhZa+2RD8HzE0cnxDNb+ovvZsod09kdHhPxyFowx+sSEZvY9s1jESR3IuoJRw/7ZijYW7tXDHc/7784AYdeTcL0fXmCjD9nFLfoofQ5PDdzO/ZPsHkv5uh/JHDsXG0GT+hW+v+iuPylcEeC1QtUr2r2sIw0T+Ui7eLZ6ej4IHqr7nT7tla/m0asbej67sbylIygbrKHyAQjA8XH1BXaScvo+yZlbZxmyPIyLcHdr+kZUTbKGs+a6Tb1lbsAqYwIUIMc3QKUEx98gFwkRfwv5A89maucj3qmBVlP3gQlTywXaktl7N0Ctjxa3mubokbsPiw8AktdPJK/Ss+4pWs6tfPb08B6ZLMAE8ywP9mutY49YLyg14VTO9EWM9FD2OZmum8rPDAqdbzxlpf/co0DU6rs/5Z5rLOq6PKX5iD/3eUnvuWQwXD6BEo6XLtFikxvYIb+GyYUbPMmIutWmBsdU30IpwNmBaq4Coqr9UQKFFRXjh+zFe3m08WFe92/Zne4dR7CgXzXoVH6mtOfiI1F8oc7r9xnT6v3vMyZJD3O24rmFVwmfWXrN2aJgZnxF5HBD2wg1AvdcTH/C6lWw59LL1mb3QlwHGsQJub0A1OCYlrn6sslQFFkZtrxnKMhLAq+dUn+J+NGp/9ZlPoy+7JZTXBGmY8PAvOD8wX1QOGEhal8u6qvWnvMXoZl3Zpj6j7JOnSbya4lUtcqJK9WrneXS+cHllEr0aMq30eL1RNV022oxkm3+q4xSigPPiNStdCTpbG47tZcRKxnMudKYnsuTUIvy48nRcKrA4kwojH6SKUlNd0f3pzi67xxcKFHmfKP936aaHVG+wUi7BZzRnK7U2rnSuKpr5zIf+mI3WMtGqo24l2dfA0+QU2nXIHOcfyQ7LO2LosqFYMIq4AtKxwseQ6VGTAnBbeN0ZvBgD0/eux7esyQHfgtKJUeJ8kMj6stcbSst+xTsHjwysnPlhu3sjuajCtXn1yFMz9xYmOXyZZYe46uCMx7RSA3BEHAOCAa+KpMRjMhCf1rUVrNZRTw/JKkcdlSIF6BmXiJ1U/cvzB3Vnlrk9rpQKsG0lTZH09EXvzhG6xeYi90So+
ogpC+IRU/8h2LgUgJREfuxNCUcz9gmJZQ0uj9Sgw2nuJMskU/EoaDNH7ZObOoXib0A9gWO0p1IuvCU9pFDJ7XMTZtvi2XcZME2NxkvxKHg0Uf/ukzv62AwDwlHSTMRKlNUU90r1DVJyYXFM8vtFwQ//Ys/XVQtcJs/WOTexDO6+GXB/S/cG/vsO5aKs9P1Nj4DUtIhl+/4Xe6MQ+GQw5C5zyUPdvob5fE07LrJ3uHJJl0zvwsNzn1gre+11Upmx2bxCb6lSYzhc2hLOokGDIIovcQ3tGcJuEU3LLyxedURjhQsaYPNz60YNqFc04/NpzvC1Ym4t7M3ciR7aOxyC/SnRPtDdTRVBszkmiMsnAwlXh4u0XuEcfpGyMtMdtSlw9RWSVobYK6+VmjopPBeHYGWzot8tDivfCMu1gzGrUlyYtJC/umR2Ey5mv+7yhfve8h6XeZV87QCIVmRL2lhPp25oJrdGKf8OVQSEpVF7k52bFIKiwEKFa3pJU7pdQb64STdN7f0VTgzglWFwDwcBzzoMzRwwpKQn7J5QvKATsdCGewQkLgqoPRi0dNB/AIJEei0SdEtVAjNqHPOW5XqlyWVaJhzxUZe3xUVxiu8O03OS4bv9rrS5g2yzUYnGKQ5T+h1hKdIv2BefOlX3TSW2BdzoD236Rh/hEQSURsqqySoOCvl8wPHfwISJa1bbzM9CemBmuTQJdrt/xxsv1QmYz7VgjIfZu7vKGWVkUu+F6wNNJ8yeb2TrEUASh2rRQVJzHHwlWzWKbKvfkCNR6M9HKmoFevWgpra3u8tFgwcmJ6cCgAbN0uW+Jd7/LdJbyy7yC/NxzlNXAfE+2/uhAj9Zuxko53Zcunc211sMUSQgSAqEK9SHQpTQ7d1aXPA6r+/R8yuVj8pNhRk2ihX5/PlFVFYuIGK1iSTGwtbVr7dBZ1DcrWc9SzYVvhLybOH5rinw5DbN9jiR+z8t4eL/Qgi9Fb9N4viZh6YEiIxmGE4tIdMvO4CS+9T+ZgRTvV8lrVQ12RpmLKzjIp9a42Kj5x0uaouP0gSMhdsbMnyA4cuDVm8HbfaqofigsuoyZrHWA4dMtx/mT+MsTF4cGxOtq63pKsVnwtxpJjsYN0y16UiN6TTbg/jpClsoxsaZqtN1C0e3pEjxNLggJNz34Cs+KXJTa5tRwLGe20ZXKJvEou1qbW4ZY4uH8NmOLGBE+OGp67rVLcUZDcA0zRjxZp/brpZ66apva5YbMCStNiSRYlbz2ZcgXRanV6iPm8j1PfDh0hZahzcexv89tVbBE+yNcsm+s3DnFeYGyFoyGxxV8jkESl2nq1BWRT8yt6k18wuUrOxeRe23yhyuC8Js71S7DK3ci4xSz6kYwytpMlPYYecMC9Ce9q2EO6leaXdWdVlZ8LSbQLpxH5Wl+STy0DXNi8XTZlVDQ6P5lmXBCjoiwiN23jdCHHdHHMm2fHqcvVJM2biy6n9fos+SKjmQbl0QHAYD5X/7VZXJ7AY+qkponBnJGQmFRyuyCbqI3hDYyM9vaOloz+vqXcmwnm5Sg9LRV9Oe4ehDEZq2oVNnlpkuZWj2/0L6UDPec0Xvv3DoLnG9T17ebZkvblE5bLgHSx7uIXOnQGK5b9+DihxH9VsxqhupUUa+7KAjxzLeWRfatufZEGJNGYY3eTCCqEap5DZQAggwf3c01lqW28npHlGPEgmkzPiO6V5tabV97/0vfe4+sJ4KOv7YeII4y4pqqyBdCut/OUFTt5lvecgo4zuq72ZN8T0TBqOkTqqxE+dvcdtW+CtAfQ99BwbrEYj9rNnUm9GfR9diyOkbs2t0mqXunAN6v4O3ldxS6P8v8SvwHeywRQV9aLh2oJ0nGS7y2/rNoCbsZjDgcFXrCoNgtN9Twsvoc9c8iVgFLd9Db8Qw/5zcnesuRSwPrpwDmF0Lfth+GmDQyewlQ1PekWWP3qIX357ZucfxabeEXV3pFP7uypAISH/Ay47Oo/Ly82IlfVeUd
laCSO3KTjYojLgu8bhoNq2FHyWGDL/uuY/y8TOdVpUFJmbYJoWP0Pr1HmeZhZ40ZJz3F43QNHQQ4K1wDqZ8Y6OD07CRSxce0pCAW6eMjOpcywjLaf/0X7oP34GnDc6FWdCCjgnKPDcynSEHVl9wIbFf5Y/H8cNhoZ6VOXSLBhTobPuseLarnsJdhi6Dy5vPs59lQvIz/eCRtM9IdvXzFr0wq4828OPveuLyrpTF3i6OAMVJoh8mhyjTspJBsYJ3q+/jyy5CAiAV8xz2BpAUBKLFCaNiRHMNDY2nTY6W2LFIf5jDF2pItDaLg8JmYn3kw61sYK739Fbh/VVPsgFC64k0W/mvembcbDq5NY9ux8XDdPiTGVgtRHH2G1dqqGoLc2eZrLUEdmumajjM4igLLe9Do3GENFXsIFYwIJMOHJ+Af/OVpzkUF7ZBh27vRdNuFXEVkovoovkx4dRZQ8qld25WTcq2Ihnnd2lvJ1JAiyu40AiNPgf4S98teygfhrh4bgYyf56vk1PLOX2sFMN83tQBLqc56afJcBclf62qLKcw6qcE9wyfrTh7CRb+SfyK+S2csEElzVPWCaeLGY6DKw37tYDOKqH6FYFUkElXA+vSXnne7/u7xdlZfs63orvFOIzDE6MDDiF1RT3b8pF7i1xLOkOs7nRe+XWu4c3h0DS6aNJSUQfkJTqcwRWRHf/R+IqT+/VUsjhNRMPqVPKnexDh26ONbrSirs8qX21FqHoKVq7qJEJhWZ8/G7VvP/sv8OHPg0gqanHZBq9hzwGKyBvHhKb2tV8pNN57VaVvbJqXcH1AHoYFbUgeUn1CXg3jUCJ98FD4++qpN5s0/mty4i7MV2/sy57HFz58IAmk3BLI8cXJZYKyieopWuFYcp/6B86XO6zPSofdoaq06fc571NsI6ExVJq1ma8Wg+rrlKsQZ6ZmpC7hkPhEhzjT3/rMbaqYeW42ydMGF6wkmbaNBN6MkApk+aFlk27pyUBKYorrnr15zw2SxD27+cj5NPKXUIwwttNYfwDUteYWteCaKj6u/dmAzOxXiluSuiefgQFBBIYXPUciE0VuFNZUfuZAQuEzP+p0pH9mcr2Y4VBs/UDfUm6JzlwMk5yfEnrS+8m1plVi02pphx3V1GzcQ0egtzStoPdqlgdBcLuFp8dAQv2D/V1ZfbynNCI5bCOOUeCzOB5Unw7/sbT+Do6cDfQmuGCfsR2CrK8o/8uO3kqMpRQtUz3o1cQsNMw0g1GeZv4g8eDruM6zwJZ2867Dbz0rKzhp/fstPyzJbPK0yqkF23vo7W/Futv9c8/aD8nQG4f62G5dnvyW80sDKo1nve7W2HarA1zSOWoIWv6MUhK1Sw25TN/4h8/NWaPjObwaE/Cb8XosszS88vj9FvqemX7lqV8KxdY0+ejfm25tgT95fdfZ/ivxp5C0hnjW+LwEU63pk30uR1CIlqeaaxR4UpjKTt197NI/xxrK1Pm+OaMjWqQsY6emVnvnpE3k187OF0kG9jOTDMSxhJKkzm8viit3zLHmy4yRsWXQKB3/XxzAgRzuF6IsJk1lFcuqONTQWsXjPuEPV9oZdAfEGkX30OK8l9EBXn0n5L0D7POetCNeNYPh/GjNA6p8if9ooZaE7rr9wkfFd5plo4zjYTJfU2N7gcidnUl8uK3SsDqxql8EqSXUKyB+/OYRWy7bOtLZ5QW9eDPjT0C+mmqayAk1cevQbyfjQVOhf+hBR+5uey//nTsj+KfIn0M4N6cV5P2L7l4T/u9h5nIyS5Zd8Pi5pbJ8JsB8zC6GxBddWpKh6SLZ09aQYS53kB+//VKq00aHYwTyR80jE36wmmptqGPI0lqUgrvsl/uR1RBFPYt3vfJp6/f+uv/8zrjKMTcRebahUC3C5aBZLSIKjdkj3wxgfStYEh8j8ysxhIRJXKiKp6bF2yAdCCA9j/5ZbZr5fL9gZYqfZujuPLSpXS4+CpWr3l6/0F76tVWntOTj0bZzU
K3gGOBf028+Jl9od3u0+fx8P3MtsfTKZ6r0f92MJxuTwqb/G4yN6B5KVRpOF15/mTBo4PNkQghnOVemWyjiyNqF0eIfnk7kauihp3IXcRwMQuzB3ebKIEKfaSRacBgJir8dkEEuX/eNAUM77iVkcx7sg1Z374g5t6THQUCu0jUWiyxqDrLfl3BMxVOZL+VGiVy0XOFrhvERmQqGsU6Dm0EuFty6rkcnvlmm9xIxvMOLMC7dLszpCSJvsXcaNHN4ppQIEj74CU1QQ93jC8/5yswfuYP4FvRGl2gddXQX6puXSNnqMm0RYDPVF36N2sHrZVakDKHvh0TtPveayGsPv2NhIuErCzsVMdqMdWjZXJlz1lMNItxh66UYqjf1bfdHRI7q+qv3WSW3GyhU46qYUfkzWLKkyoToxIf5GEDqjgHvNJ8/cXY9h3KEAn3Di7qS2BY3T1sEUQEyVxE0JqS6e3eJJqhO1flGs3ffPYz97fNYYSLDT3+Ch7LS9BS3OjeuHg6VTAmUvmlveVq9z3MwB+wbaqjX2lcV459FNguXjUeYRhuJ0FySaq15e1bW6WmTimJu4yJen1aMqBeW6f2nuPrcPutV62MqV+RQwspTZjjWTvWy/kXy2xEognl/axAkCoSRsBztkqNVlgmD69RlxgQKfrQVf5Z0CvOgbnSI+msxiEq90lDMVLXYYrXXAxQs4h758KGljCXl9cNxsh+vPiXANaqsNfpqSlDBao4Cmd6ooW//epPvcI/gN2lkuyr6z64V/Rn+PAxYTe7XdtCj0cqDwKhEZoiMH9oAZTDCygW2fyJHrj/eU546j6DpnystKZUM/r3/QiDLNKKqe1oobjdMi0SsOCRnpaeALuZqtf+FY89jN3qT6ntuVEK68eqmHHxV5dDHqbANf92l8GzoGllmTzLOt4cyZAhjXMFNN2ueJkJArb+LT9PkhfIJUVFnyLRACsXKws/M4q9NUoKbgocM77VhPXaqNKS1E/63hwlzbJgrjfUZv4UFcdTSMqF1Xqt1WgubE+GA7bM7mlOVOsFrHBpKJZOiY99q+ZQsmBhUIGfdI4nvD9d+DOIxTevDDDtYyD1N8w2fzSifpBFyeamQ8itbvb4gqA8KZfHUgWB3/5VBw5khPAQVz2oZPvVgQcCKHKXQHScdwrLHVC40V6SytjTQdJdrmcFUJOeN78VGhK/nb+oxI0+MRu0SaHikPuChjf5xUI4pDL4yQd8s/6Fbi+bcCTXMljQumgfE0m9nRgxGlRejZhgnBMSw2VGv5myPWJrol4itGeXDVVppfJC4nI2Mv1J4DqNHVQi0hQtlusbNR7RF5l/kRfFHcsMNsykmbzw2Brbo7lyiRJxljGpgFDeUuIdvX1bEKQlcVuptafS4o5hPsYX9aKZa7mVVd8STmzugRmajoU9XsHO64b3Ff0g9z5T7VMiw/LOebutuzORYsG7N0x0L/XR8eZoqfRhYk7WhgVaDJEk+rEhkJmP1+jkH7EdQ128/V51y4iU2pJ+s86spFHDcpURIJFeHVDMr4mLhZ0BJZVFRQaV6/wOdMxdSgLc0REjlVnCRyi9gKKyoDCqAB17A6r1hVBg9UucDZn2zVWsPx0yzG6/Y1zzqq45ZpQ3TKelziinI1MUPihdb9Cf1Qirlu92GzDKCmOA8Megrgao3AR/g8Q6F1Lr8UaQcA+n57NoBCWkFhePdy07GVDjfntyPNUe6jL0FSu1+92F8VTTK+LaaqV/xCz7pK4RvIH1g6cKzWcRjAMnfNb2EAvm+4nyOGifsGuHbhjaQiOsHfcCTmq2udhucGUduZ06Q8ZBgK4lydKLpfJU5Jf00mPgV2HSDvFTKyzvlFIC6rfXKN0d0Yl5R448h75ZLrZCxPjNg3DX2waRGX42YiZ9STAKyMSKbtxzrj+CHJs5Oum0XRVzLaqmy4YFPcc8DLXCVCAuY3xXIgeiSyx4D9FsL9K++Y0ppxmiMrd1ZT29ntBtABhFPAdX+1
xn6zySheHL1Y3ps9jwnRsBmoFPxqScTBZ4GY4b4JvfRHzLze5eyfn4KDtnhrOdz3IypvkbfzrVYXJaYNfmWs9TPFTL4nTJvLSb9YZ6NRzWlnMsCXsWybL4mjucG2y3sra/nDd5xOvEhT373pjwn90qiLTVPsBUXpijqj4RJuWvUc+oMvxfhUsQstjxhictYD3mxWP4qxWnlVGo77pRy3CElp9nbj1n4bmb21xSimmuncG+4+nCkv6cHiNWBlfUT2soNzoYase9tkp8ojKHSxqjCafZnJAmSllzLNgVG+TNp4Rypc8SIiKJ/0twh+EnHs+p6vP1MoR24WOtlFswGxU0mZUOaekYjSps0cLDdmnernmGtVO/xWQ1/PQT85I587kNyzWYzmxdx6eqSRvD3tLTwaKFc9WE6M5u4d1XjrZQ6zRSvwiTQ0rN3EpNHWbTNlG2qLasG7k4aED4gGFZxCvh4W5Fjjcwpkel8po5g/OI4uxNTf6nQdn9K5ZG4XW3m0zfg+9ApmnYW3Xw7ByFkD24ZlAans+hFRxdBO+hw1fqA1+YXaKWrQCntnZF1Pr2dAdo0pTi1DrObpfPRr/ID+CL7yXdu2xZ6CdMzwMm3Bi4yaR14WOUxXzN1jNOxHzZaaAkCjDZNPtKtar1yZoKNzszF6/SR1P0fTAyg27LeKiMCHb4gyq5WTFY9vLOAG58XwQ7KOuUrCwxBeHvrzNaOqZuJGqx6KggGW5TrpTHUTqsuuRVULsZHdYYdAatZ4La/BbZruuNnVUNwW0StUrb6+NNdsKZihg2bfJwcNT1uQxI1LmxMMBhyHHCXP6NmJzWnCqDAIhFj+fh8fhx7RYxDf28r93o2Jbs/Or7lM4MYxejCzaXJJsmGk/atONBboR7v9gGXly8znl7VPorImQVzy5Q3je0BzHXGlONO3XZ2YTH6gQ6v0mfGJFIjiNq5uvuYq2DZ1XpFchY26ijypd/yiiwLj3pJBX6G+I994TEzD48fdXqH8gnzy4hwjapSEuGykDN+LuvwT98z9ioQp7jN8CU6iKTaOFSILUYytph0wS/fKMTKQZWDmaC09C6d0zRG01QUiEOUVC9NvCmK7gmlui6OJhjM+i4jLlDeAyMsdsqS09E60c4Fz9syLVz5s1YBb8vXA4AK4QUGzHkvKdb9Qy8qqj6yYpATpqyfvciaVo2x4Gmf4qIeTNMPYzeBhFsGolvuD0YrS2Q2tfJ8L6k4BH8Hb+7kFX4ud2KoSjQ2fjsEnmoEo5RDxPIt7VSOGGfyiuvuE62nypPENTOe/yqKp6yHBc9CKOdhQWJBz/zwcxJgXdY2jj2FUQgS1BzLRNwC5jeF1Ra050DjXgcV2c+7eztotKcSXt82iCm4irOV+j+t6HG7SNjrukVdTDckASwzlbxp+LjD3YRrF8yNucm1t42c4ZeYj+ho1wi1EP1tb2yom7dM3mnpXT3OXZvrlgtYxBglfmOOmvQQXQDpU9F1fEPGZVI6fJoq/UUWpLLx2mZrqv2publQcLdL2pcjb2SRhw6H+W27uS3AXUrVkmysv6gLBPnPejMJ9tlOkMH8m6EC1ei2LpuDpNj1SsYtzfDVMNZNDb6Xgk3a12sm8X7Os1KTgwJK1r8BEtRJWGzx97FrnqY4VdRHMPGvi5MlOn/tF6uBT9OCuJrLiFABnozPo7VVXjW8058C805Lkn3oo8syqx2hw18es0opHhPwgzD7SMoZjm8mDg2GCbubzui569x2wsZd/Cj1oLH5NM6EHSNiOrJ7iaSg35p0r7uMbCWVDuF0HbawJPBsw6121eTeuP0rH/8bmzls9UUELaDyDi4amIDAnPwnnc5s4RPXUNseJ59fndqLP2vdCX6yP8Ca0iHne53hOcOG6WB0v/PbqaIbm4A48R5AR29obEY7348U3ac7sR+C46TQ94jtwh2XEL/uDzcXWUy94vM3SvrZu3/Dp0MdwcQ7Prfrm+pG5FUnE4mdgvbWl3QU8oyry
ExKUDnBX2UvLpqF6OYqzNj2yd3M9UaXvTaMuT3jcQX4brccLT4qXqvlwqLrCIJfnS+5CYfiXnbED+WlTuCbZxIoq3+9pmbLvwmUVDG5K7xa76rEmT2kYeHZ24uMWcK51idg6Tc60lDYRG1rzk8aa+MYMlaSFlwYFQwVORYKcbyx5gAnUN/1dxlprzDh5+3hef2NJaZcIATMDH0hO6AVV6F8UxwpZeVIA8yO5KguqF1slRjI1wlW3BHQlcmV73cYo6c2HnwPOBlz52O5nUyx/LPv4meb0t4EIj5BXIc4G140ltnEKZVOaMoeME6zh46mG75076FmO38DHEPtkruWK9CUyRU1zTwTlDx5XFFZt3mgR+riyua/NlAQASBn/dkpzqXdRLBokeRdstohhE0mQe1m9sxM8nxwqcv0VGzB/OO/u+JyrLarDva9ZlB00Gp0nksh6vXFPKAMbOcoSqNZOrt4yxLKsmWw92Q3ECdR9N1b7Lol7kqDaq9xTnfjd7QHhEk70cPk7O2Xj1gvHSP3DAMtA2ffWM0x5uxdfka0N0Zk7HNhsG9PcqUenv5VcueE5oljqNMx2c/aI7tOYYr74N5XGPeBQ4cO72M/B+PI1ux5WQhgNI7PBrbTUwwl9FNhqY+YjNq/+6vaQauaMRw4Xet7MHTRaxfHBVnFAZ6CpPdHAqHZWP7irtlI7O26Nu0qwX/VjvX1vmMz+DGLch+Do9sk0dCTFt+ynlhDfddMsg5Nnb+duwm3XZPnLwuHbHYuhhVwd7yzO9ndWQyLmoUmw9NLw7JCY5b2jNViKe+fx1JpRuu3jtd8dJpvdf1cDLviuVNbM/NNnlHRud0ETjmQ/0eUPZC6v79OGpi9EwTrrnhdEHIbHeWbC1b3bv21JYAd7iZny171UvjJSbP6FNFXbZN7k+XG/FwcMGdraFytY8gIMEcHWxm+sKLPUcRbk2mycMZxNzezsJ3O9G7kPzd5nK0v+hYdXqVHjA3GKHDTAu/NmwLuZet7DPlGDaz1+sMLHpahUcT+oL29EyXK2MupOYhLMFmKxcl9DwQayNtDyJNjYY7vbj2wYIpv9FeIcvTyKPwVsy/XWsH94oW0w5qeLmt2u1D5M+RXAgvjtpx0GVz//nFVq/2/NKVqQL3vx+dr7C1dc1pY31R4aV4qWmXB5PpwXVBxl2CczvGye/i08aveifPOtg460is2iLaj9GPAz4Bb5Y6x9wtUHIyJBegh5LnxYLB28lJ0YL/nOv2YQUcAPvKqdHRUikhas3grD9wZVIuGIgtwHRuV3uJekb/Tus5qGu1xeIv+OzE4yClVgKtLnAEHibSHR/fpYGtUZL+b5elJUMPLs1ybtdvsOVTlif7Ve1SswmVFKTMu1u1CqeMJx8JOU6jWZd+omSLzI5YX/ztjb/xHMlH+D+b9DcqaXnkKmSSrWYlM3pexR/Xr8+TUEiwp5ikrYO5eLmQu9tqglxne5/SvPj8R1QlweioxJ4G56vrw1sWFvq14gcdQDEQjpH4EtdIRGpf805NpPKvGowWe82tKFTgHgPzxgmxv4ri2hJsTWsSLxSGNhAFIiNm6ibtUiv0xXIWFcZ5k5Pxp9CsCnC+9G/YVAIVTyneKisMxhEiepDaC6LzRRai+0w2B6fQ8frCoxByWleN0/QjXrbN9PPwyFUp4CuJv0pmYqB9aSd/Dg/aa2OnHd1asPbmz5h7tj0keZjtmrbIv4czHJPwafdVD873PIH6Hx/TmBeP3Rk9L8XwGZzvx3C2R/7xL5Uz3bn71U4R4VS1GdbeSCv2tX1PCvSgt14VcU4Y1Q+2k55bF8ug0Ty6as0cRJ9N0p5fANLkpKwvdsjp/df6VOkNtffJNOpdJDrFsJx8enyBtVUsGyt1hjugTYRbdXClZ9ErJYFZ1QEnSxcMNSegDAWrjUz65dpuV9Sy3/+caBiG8dJr7fvC78suvmxD2y+r2yoiwjlfCtET1mvk/h4rLVS5tqWOYcIsDxxWek
sH6TYX7hfYKATJHFHAXo5EHalvvQn6dZ5x9AFk4kR+2OhoJfd3TJPWw/jDgF+Ai/Y8URTog9dQ839Pl3f+WsDqqs20gVMYFZDVSXV0VPxu4aakWX50hpwEYhXHx87FCqHD6oHiWli4IVl/EElxI6QtYhzHRU88B8kndHnfbKi2hgJdUMDsjVwrlJKaSPIKpqiIoODtsN+Sk/mTwzWK+xnuCabgu/0HZRFtOvqatAw1e5N3Q3JIMm5C7EpXU7O5YNLcrJR67nlOTxATpSJ1IfUrZio0vQl4n8GArOpzNLdxDPjGhQ0MaqM4sDafwoXkbByBLS2LLP5RxxeWYQ7omIQBswTWNurjdtPzDHOtgP0zV70VWxVSrMw5x4n/rWL9qzcyWmZRPl5dJxhD2jYOtFg5dm326Y3CsGlZdgK6fEqTSm3QezD3QNXXFJ+h3JvzRdwvy6AszGwYbThfoLsUsTRq+0AzVr8VODHPwc/XlhnNV7hymiyg3ncDiiroqCnHhh896z3rF7lRJc3GrfN2WMefiAzxIVtO9Q108y2mfpLyIhOinH02QiavvXWs6MVvsTa5cwNgRDCy0fHRVRBhSeYOrSwQ1+Pzv4UjchS4lo8MBVnEpTX9CjdIbDqCv8TLEKb2WjgWp/orY5ZXy2sVgOH6/0RV1XtDgTfV1MCVR+ZDCA5lrvmdEne5oLrmm5tsG5p4A7D/prN0KT7DlrOMA82tGmBS72IJCLgFIGZw4m+oOsg5aWTkZ87P0XLqOz+xS4hTQhw12jxmaGRONwembtLef2Wen5Xcsfx6aLdo7WTw6OVBDiYr8A/Oo+/O8dFjB8PNSqPOkMOlpR7gVLn0QDFcvZHhze3avQlTce2SJLhU0BbsBP3hU9l82P6cqr6xJMD1v8WpoZFj+LAdFA9HtmEWTRDUgiUBuqcB6sEGe7TD7IbMhazv2kg75to4ivdDMXSYiXuAC0uSovvPCsM2b7FMCrXf1Y/HbEaPl8uW7/aPgiKRJrkcOSVOm309FYfnNfDwoliKa1ATJH5/m2USWRBqIncH/ATUe2FcHYzstcea44Yf1xB2tL/1ZlyYjQktow7SUMVbGhPFNSFjVaL3sUwmFRP1Wf09j47IOJp9aCQUK1kouTvr8ue9AnnMlIbzAxqRslo10+xyUgBrxo63e3yq+NkjLR3uGurPMlcxPcY0N/Tuy5iWU7tpXrtcO31R83L1JRxaTzN7LOKOv6KnPxxtmCTRvOB3tNsCyaWdpJX2RTt/K3jJ7JbVwebkH7Z2en8jDMuIzvW77HQYAeEFdHDOrDatu5RoBHv1J1Z4zb/LhI+GJ8PCZwLLC7MCFS7rubDrZ+6qJjNDJLrDiUDxGxT5vB6EKb4GhpnLZAu78zUKjmsHwAjqtkokEw5kBgumEGsZTZnd8IhKfJ7YNkwGe44FeApqIsfmzxk9W2kJwqS3XG6zLBTGLEGaPDQQRlCVcGY37vN8rIK/4549OLL7luutiJSATfgpQfeA7HJHGytbHxfa6aZUerZoKAFD0qCjJCkcqVl3Tb1hNzqjCBhSIpSZu4MSYQ2eA9VZROjOGdQd/lvNDLeixJcfvM85xJUb8ezn0nvlu2E2Li3dn/LABSK0iq9yWDYO6EDAcBtdaasNDOp3OUBedjma57QCnDLuF+XRH3NLZpo6KZb9vu4M9991KPpa1ueXwkGjY/xU2lulLcp6mUw8+ZJA7i0GNkpBXXmXo2rY8kItq3xUwXtqqYPGWcB+vPFzRDk8VyrvFdgRLL9XjjMygZE+OCGZ55Cl/LflhuebOsuTwlrD93L/V+y4vJqrxUSrKUjjvZ1JozPFevQEORSDT8M7Qh59lk5hSnpkNuZrCntP8TzRpHzaq1Wqu4Ic4ETdJkyzbGxy9W3wBBwHzYfAiM9tgPnmSXe1llH1WSeRLX4ET8c4fscddhKqJW0PPGDrohYpTRfheEQ56i71KLNsWn1oiGc8dNut9LX02DBM6nnTmx5Dl5
a7nJSBuwWBG/unYKsLR3BwDkhoLcFhRExllOmN/c5dr8uQhdKkr2lkg33shJ9adYj2pb0yncqrUJj3pwr3i1xDdQGv5v7kCs4r/u7PvmRmoDALcEfnuGg2Nbl/VLU5bibITERYU/F6Ejh6idOPORUwAm0bhkqDGGsM7e3izEG4c/YEZ1pfDxyHCQQ42ZR+sCM2FTuI2db9ypO/nhzUAvraj8lc8MU3rtAiUrLhU9my5+he4pxABOH3YCjl3/8lLMVAmfy/5TTeSgKjJm094WjEgvLb+DiBcq27i5iqd39qqpR7SDOCXL0j2QqPwj2N1xG8M+41Fj1cJfxxtSTLS44ghHVER2ocOruclL6srT6z5qmy3lh9NutrGysk3p7AK9RsI7/itBjMvpTnEuxGloxPA8WU2MWmndF3lZFpbJF4e1kZLJ7wwcTnZRTYWmybX+zwMxyNhvgoyMAiojKtvX3rDyWAtv16Sgamj7Cp+Crd+a53sIui/iyqATY3G0NqbRr7N64Mk23OW+19llIaFs/KIgS2oO71fnq4wD1XNT1I02OqylPf3RlfhNqz5UeSkj1KWnqzpSkiWHaMWZC+uO8r62Heq8T0ylwwKXOPE7u25aEs3awOq4zgHzG0S2I0qx98XuFvoVclGgPpztcfq2u6x2qqJKd6d/lSu1ufMAUY8NbFenXlDAiZ573WtrsMC4RJEah+JvoRdsfhJsJI+GsHRL3pn98ly7aSocYYq4XrULmuWilDKj6Oz0uXWO7KsEZXnETHivTvdsUK2g+oBj2XL16HiaCmV2xe0khGEDuFxcijgY7vskN+bLSGxlSUSnzoSj60cJtHGVTPI7EbjU8EzEdNgEBVV0oa+NarIOF+St+z5+pPeBpfqvVuGlLA6Tub2Os+1fvpno9CZlvQoxLcH1Xei5Ea0GbEuaRql9uNnrxFigCtETMrx/3i5/x92gDt+bs5vT0XVeB8iVwYdwAQlNKjzjZp/7r615H///W/NYcOthhpuKVQ4P9mn9nUsscqLP2rwoxiZ7v5suNCJy28pM3L8izpsKKVKRvyrDsPO0ueu5nJ04NzlbB8YpNGAss+LxaJrWKaCoKXrkKGRYy5KCxbRgwl/BUqVCa+rGa/iAC4OY06BspKU0Q47ky9s7xMlpBTXs0k33GGQmF5dP3qPP62HAOK2A+PXCyaJzZc3YtewpSHVJqXeAqqE8KxENnZ7hoStXJtu/jHDDIBFPdkIp8ZSISNq4BXU0Z/AE6nwMG9drPL6Wkj4hZFRV/1z3hJ3NnrrH9kxR7Sj5UutA9iLllyPJJAngB9mruTSIxAJei+wsXb7c4CecCEKkbypBo3ZCrdfMF7lbS9tQH5Ynq8zxRRouktVrtSS1OglyqNFIA+kcRinPtSGuQwBWj4aPPJudQCllNHEmw19ToBwwguwt1b0sNlfDru3rtfEGMXKBBCnTXLuM4giiNVU4krFJ8kUxtDHFVnrG2jsrHp8XKZ/paGYoz2ZaLJPFGYePzqAODDhOXFUrwK4KU4j4L/TCHbuK1yVSFSUrJpg9fenZS/BTY3RUGSl16DtU0RnAkcXRyeHsLSYau0LvqJ7J0ZfjZ8pX+Xt9ut9Sv+Z6wXjtZZRfAWNgq34a03BMYpbBFp96Y53p9Ineu4X5RQkWo68RElXeo49FKSQxzNero7HYnac5H9wd7GDz3NDbSwyz6iyioyPavDNDzXBMNAcHQmPgEY+BAGzk63X6SQnqeAsELTk8ojf5VnGQoLt3b5ujfKDtrEqYiwDImLPC935kZX6PCnDofQlo6vIM7jJOZi1TU5n1ecuw3P5zXCxKykI9APqyaENCwPliAZdQNZMlj3B2ju14qF7iN067BkTexi0zU2QiyrG7mlWXvSj6Rfu2gQmiYgrIin2u3cdh8UVI7Yt3E5moInnyo5VPHyiE5Rb0y7XKJbqeQ+0wNQ5Fos11cR0OlX4pirGsD2Nn3w+Sbggri9wjxjq0xUcD7Ovv
9G2AmPO1CwLXngra6ckkDK1T0k6ZuAzVg0C3uCgrMUKXDB1XEXF1LFlDrxnueV8EN5fH6uFgQIkBm4Ub5UYrzMVFkQqgl+Jq0UUrXmpUsLm87bTLBdxmkpFml+z2TLvNJfrXtlZz2iMThhHgROWeHnazlxfLUS7DqRpvm6Z6INkNS8vMs1Ki+dg7H9+kguo4Xz7XhoZy7VHVcCThQ8GTEVWQREpsbtInpZCwTwy+d5pvTrT3v1hHky/Fm1+9KrdXlzzA6CQz973QeTzrFHDfEECOYbx/6cAv/cj8YS3S6LLcn4vwpS47obG3J02DnwAA76z/s2M+jSU+btE7jvohtUNVgny83yhrmyAQnSY37vgM/wATexNjcHXwIiQiU9WLrCYZjlHPWFXfY2SkJHwlEBgvLznMZqg3q3Vral/mq09StZzpjsrqEg2KTI2UMKppZyZQj4hWSxSJzQ6dSZ4dz1oxfjCT/EWlsUR9uPR7veT8kz4uIq6oh5VYK4v4RdR3ctSff2uq4gGvV1X+oeH7Gtvju8Nw7smGFwcM/1j/dNKTs++CtuxYtaSD3PclB3XD7d62LdB69eCqu+R17bs5dE3aHqMvW9tdu7gdm8ruPABfe3vv174trN5O0SvHvm8ZP21RRd+JxlmYHEcwl9xmbNzAKMR6bUy8opH9qeq/XF8Wd++QY7561XmHI2yerKEQRkTGxKf09uLDu21VeVeyBJlVGTbpnoi14PKbhiGUEXXTVJnlNjBiomu6/X6ywcFWN/dAKIbogeFMp55XK6P8RS7l1Z2KbuukO5mO4R+jwmeN8bWAMwucvz2gGMLTMWmRIfk+gq+1NZ0V7RP4/oFwHkuT46Lxrq+KzO5jY7sT9oXvSlHOQXdjJ04B+NpTwF2babaBFO81hdWmndxI2HvAmde1HDnZWTgW5Wn52alCK4q1Fr9JjwU4fUpPHPhVZRD+wzRMuSlM76OwrkSs07MucFB1CUXN08pEPp6MRAxYPSPRW/2BWgxYGxuWP59QB1kIT7nL2D3RCB3FSEuq2vp8bkhSOxo6/gl61iexOEg1KRhanz0rf7Xd/xnjP8OSXa5d5W2zLfy/Wq/HWtWgnYI9+/xF0Yv6KOk8G65rfhUiPlTZEkHOkhS1OVBBJ96Efjhn39uxOVZCQNW7v2SiHpMm55RxnCLHJazGRSdfRFSO+ZGnU9u1vSpWaP3e+vuw/5t61tH1yYQqnltVTwM+4o5CHdksrXJ0Ehn9gzkgyGTM56jON5PH6wecC5LEQd9eaKVHe5wZBM3JVgFS5HNiM1MzOQb1uCTicpM1tSWIyjceD/Gxf1BVNWtVtFkuT3akxBmWvLUYBJzp/T87TvojXbLceNab7/L9Ms+JqL7fhkLaQPop4H0yfv3M2ykMSPuifGmdl66WxZXKk7TOqlv5qvXnARdg//TUaEv0wobQ3eStdpp5a92oykZlp1cM+lGWkq4Ch40ZqT5vBCpOAZQuJ886apXzoeungFazh/kimzc+9svtpntY+uiDC5x+BXww+4cnfd8e+chLmGJ7H8aHDOSmf9Q66Lh3rWlqguBQOjpgLn3fryNvp1otf+mX9RsA2n8yh8FYQXSB2Wprbpr2rMWfk7PPy5po9O6DSfjbwf5/+/hY7L+BMZjxoaKuV539KCkOzbX/8B8MZdWjFWoXFZA5HMd2uUD9vQFk5feQXtnzv+BSfmBJ0XNsm7cM5zl0U7A89vZuyBx1zaB6BThj8E+PxAsv/tx7JvyEcOFt33TYSZm9ncXzv8Oq8M8tqfTXKHm7GePZmKFC0w8AKfp/+vfL2VH0BKqAGKkYH9Imdwr46PDkDfPDmgDQBK40GOnXoIByPAVA7bqB6vyX7gEA+f+EXfPLL+kMa9cfXvuQT7X/E8ZzY30NybpFe8P9TcGe6vO7eYZe0yJtHa+EjTxIm+5bxf/mvlL4t4fxRsC5vwKu0b4+I+39UGo91lAL/1G8evjrNtdqbgI0UvXxDIRQ
ol7nJ87ODtQ6wO8/f17GW3Yh7QX4rKiv6OFs+MT1h2I3J9LHdyw6fQ8yVj4lOyWlm3c2SRS/BjvZPjQd8mXbWg742ZKJe2o5762LHw7+a8Rs70lN5p7K+76Le3a7jfTlLCf2Yto1fbql898mK84wNiZ+d1JbD3mVdHV9YM1HCCl/PGj/m/C2L1W2cr4TFRPq6Bemh1cYGZTejcsK0vZquiNuqn5pfxpidm+oU8B9V4muvuMN0sf7UtZ+hWLJp8an+19VyiQGnhfcvF9Y8CjmnYFUH3JRx8qlcEZ5V7ffoXeR9dfyD5lAih0gReVNuG3dSil3XUHCVmpn4pNQT4XOCuzVbIfJAN7KTDmMefGDyQMRmO+9CRxUQavPF//W+2b4QU7uXor7qDE+dyQBNGnqOP7o6rNQPih6lp1d0Jp9I+Pwp8MUeeKI3i+GuqWL728a3eOQ/MM8pZ8+U2bimjD+XvTtYwlkfT/9/xNQ1Sm/0cNNocgiYcrkP8B/9iegodUHBbvTlv8gXGUnUTrQVZJWkPhgi0VJOC3y1Z5rSkVy3eeFOh/vCBLyagbf+knQvzjzOuul9UlJ6NBwJVvfvL4jumlyOAs6WELR483PxbkjCtTYEb31ZWFM28BQbIxeixNwRu9/aUH9MWolb9c7Pn5VTTOc7uHIFWeiRLhZyrsD9VC40JEykIDvtQjjryzkzRRKZPqFQ3u97T1ZMe8C9qRzbdbXa/390lDZxgzb8U8crlv+wOlb/6LlvwPzVWrcV/212v+crVF8np6eqnAH6yfVKZ+/1hQ0cALplmw6t4VAh7f0we85SzygMGTJ/EFHy5uF96Y/h8Pl6tSIgal+mgYb7srCDeqn03GzoUarLOhW1nPQWDZVBKGAp4D1AusG04RtPRQbWlpIGva+BFF6F2jRPVO9iimJXqzr0R2nl7By2Fu6y30bOXyP5anmiqKdtBe4GOlSJlka7nveDigq7ebDshu/edEx34zv5nje4r5FZYrj48zhOqHtVVVVif7pL00RiXi9bQYz72Wj/70xaPyvcOvb0rL6LyPRJWYl5aUo7ppuCjuOukmyKnEC3xhxR5A77VJuHcaHsnU52ffVgohoRdQs+QJm8NISuWY+dDATRoMg/n/NfHdUU9u2fjx26SgoXSABgtISqggoPVQJAaQqkNBDL1JtiKGTAAkQFOkCAgIhVEHpvReJgEjvSpXOz3PeT899953x3n3j3fvG23/Nscf65p57zj3WnOtbe83whJag5xExf5yvYfozhRL+nOk4C9DTlMB7ZLzXJvQ/FuYz//H345B73f1jq0kS4gCAa8G58jO2yciZwt8EaqvE7HgmHqzFHrX8EFdtorxrs7/DQweF3gO41P//gR92FkL81rbEN2CnZejQBRMeVbc6fi7xq0PvGjbyvIA2fh70tH/XC4l6hfh6UEm0o6N7xvS3qaExJh4jm88JfA8d7x4DrszncnEkKxg5bumO9QAAE/+UPdsfWQoEnYJYSKUu5Jf4HQMongahv2eSqXuUlaOF3vnZr4N6B/OsjwDy77p+N7BZxxxe/a7DLHPJb/TSiKAIXw3DwCQR9HWhaqMvrd/8gWPWkzPv6eq7pVgIHF4NpH57iXxHOUacMJ+NbYdStL4j7XYBKN4zK+s9vu9eNOgZI9GrlDkHrlXO8uO6gm3/PaPvMpvrh+bmSBbvI1b4pUypKyQK42OK9VI5z3Mm4ZgaNoXZtZzOAU5EGxAQY+Pj+oqEdmO4dL/+x+mhlyo6ZB2yaZOyxuAF3m9am8NKPTuAjywV4IUoky21wHpWJSm+NffhXNnVK31RZnwSpWsew6w5ixH7weJNNiKTnX+cMcySP/MpOa/G9BEA8qM0+VcQrU+Ez0azKCyH75cw/ylyztrNOe499BJ4AR/ltP60rQmR7BcSXFnXIrIV6kh8utv/gDJ+u6vos2syyB0G9bI/eHxR45sW3jabQy37rOpKrdX3ilVL5ll8TeSG1ql6s1CC
YTzujr3JwEvRIZjX6RobmB4DNqlaiE5xj7WwKvRpvnUVKbkQlPp+yONurNJzg85EGhb9LUYu/CKC8EJ/HjXHCQSfiuzXA8bZ2Nbm2CtdIMgQk5e76Trd8wTWOTMqvBvmhcwcyxhHMYdvHL3OoVVJZNLiJOMzlq9uPHDMPquU9QrHNm9YXRdi1QALr9PSr9UiTLb5PA9RXdIB1cN249KHk7egYvdMS5yc0+pCnAQbVLEqp9soM9yDOOtmi9l95IsVOgsYO0IVVfV+52VpGNLX13jbw9H8+YRoPqrL21A7BiGL3KrLm2m3Opk7bYJoIE3iSoFbEiDZNj7PcSGtpi49PdKyGn1+blkraEDCmqjfkI+lXWk/X5R+o1hpRYbbieSc7V8HExKcyGeqVlZ6d32b6+noVF8OXw/50JMo5Yx4Jr8gHRp1fdR0pVzzM1o1U+DgNL+2VrjihHz8/uX6PIEJhNprvdtP7Qb7we6mqyJlbHpgOnDj9aESXljAFETuXETEU+qoNxQ0VWtt2si2iIJ4UZXNqXAj5oiKeQH0s0qYZjEILFw+GeqXSYNlVS8rFToXfBoK926Ys0aRYS5ORE0BcoRGvLHNAShfW/VeSCwKNmZSdhPEr6TnoFb/aLMJXXxd1w4vOvfZgL0Cy2pKUzNASH9N0cPmZBI1dQVGEhmSLucGfUgRuZ/APgqvYjM/L6VtCva4X8Spv5zLmwePS0c1XMePsORbXEDAejb7dU6Id05lrT8vWcleeSqXkRWkL1rkEcrIy+BeloPvreIQ3q7HOEOra9bjkKZzlcrjxMpF23ntlKKYxjp7lnauyGtAZ167t0EsR7BhDAne/+HmXHNCxW7wqv5baipoIqN5KUOtsDjig1aIjbplM292YjbiU6gU3MKWxmxujj3PxGw3NNeRvapxyZ5VkS2c0Ys9lJYU9ypcRTo0MmMHu5BGpdhTvpBD4bGO8MpGnhi3liFpkSntQEebt3k9oebtI+y2jhmxTQ64fVBqi12PHYzww8N6q5yVUTdHK0ekXZANwWPamvFqQgYFcZm41OzpELPbxX5HWmUdA6aVD1RtzFS2QuoWPIbTR1YuXKIO2c24SOVXnamEWQIb55NHo9yZCefiMjnDJZh//4UNsvJ4wnfwRdx+rjAFi/yglqzdEYif4QaP3a785Ayigms+MUyT4Fccg/SQwjM8ffnmJF6kXEqu5XsyxsekZ0tT+cnWGoeSYdWIWy/TNWAhwgYivhvkJGG6lYSDWKz9b6OL9kEoKXyUzcfRCsPlER39DuJC8ECwdLvtRDEyU4DkhDPhYhzGfMX4izRdG3Yu30pOys9LyE0KfLuwXmSs2NMfYDCxmjmIX/fUW2roVuQaIltGFN/9wFOmM5smzKQ7M3YMOINCXxTU7fq2hQbr5A3sOBZUK1FHi1mWDuMTVt9nq3dDT7d03HEr0J3J9gwsDp5aABKgDrz2oDZeLxvVdAnW6Afo0nhdR6bauhRym/ObT13T1puB6a5utvJ9VmX8bru5vMaSwfn1EmvGOLiZ2sPh5Tzm3GehDeFGrWqN1F2JHG8XtgvywS89DPO+Mad9+16xNTcpbfiJoa6Q+25jopajc93wTPPQaNknz2x3excCuuENvq+MPm7nDK7fgl33bj1vFyjOlmaZwJ1KwBoPmQ2s0NZEyT01tBXWZBhz56mQUBe/+plnBm+SL8/wQJCXhwObPJJdVHWt2V3GZ9jDNceNz3Aka7x3N7gDp5jR/KBChTPMiG/DTFyEpnidvrbwI+dpA1pldc/z0azK82Rn0mh267V4DuWslwp0Yd9xsfCKIdSXr7R8FEl7VGnL1pWpbMOI2/osic7u8NJ210Qv2tQsuAkw1CZWA58+gLNN5ay+YnPVg3z5zDpfu93nVlQr4aEV5U3Wdn+um1b5XuS4imzzRGaxsSRM+JT6hMitM9gGqDd694ntdmn0WlRY6rUSl46LCqVvHkZ4GtBFd3Hlle5lr87/qEX+Kfsa
PwoVRt9jQHdTbemnw/60t7C5Y4AC69Gwy3edaDWpoILouVsDHL5lDtdLuN8bXabFbzqKt0dtdDQmUlzaI+Kx1fVw8YOwIRwEtHtzpiD9bROt/dO+lQtxiy5pwkhOAzpG05gpKo0+v0yYqYMGDhkEh3CZ33Sd8w1rt5McgXv7P/1S4v6bc0C49/Kn2hr3mp6MBjjNnXgJmupicfeQoDNZGanyXF3D8NU8e+lGSS5YDNLD8S6pK105bMWLMqulfJa7ZYa3CqRFdn5gD4Xl8KUN3vSJib/sAfvIfEKELjRCJgq6rgwTTKq7s58TWHHGxuFBCig240viy7zP43IPEuaMln3o83Iy8yPhdqG6SMNvfTm6QuqEStJ6mwnewXXTze9rEmZr0uycbYjarenEl2qPZEnUwiWgEoHQVoHcVHMTGKJCwukud415QF5lUpmuv+uQbaWj8M3TQd1vs75ZtJlK05iNmac+MPOBLbUuJazrlr3g5XEL8Ang8Jzl1VtIdeXIQxcHaOwUPynKtHGHIIvDkfKl+YywjNXSiSTS7QwMSdzOD8sl4yAqSObQfSRs8PIkq5C8+gFSUZaKwGd8dSrTfz9MMQOkNtPzHdP+XIb5iWvnZLa9xDx5ZBvG4Czfx1EQyFdeCCZQrdtuGMaCGI7qOV2bWYkh21CnIBqmreqTHZlwI50i86fRFaezEwbrIpzByk1ZT9DAJzywwscOyrLbUSV2dqT7X0WKfAfNYDvf7DJE19s9N4H5Huwwt37zOwESIsLpT00/Hj5CTBnubDoFgtTs7BBJ31HyS0A7vRx2HKKEzweRDpn+7ilHXYHRamhJGoktWaMhzzxgFRGCsPKBU80gVaEOCQf8yV6zQ63VJRJJCO1UpAa0PyqZ2nQlJIWvdiTzMIjDysCYd5nRsk1KBgr7ChzAWTwvzwTG4Qxfnn8d3QJ13IaPdzsgoSXBhnp3tXGEbK0qG+2k6UwBcSzYlpuWhUBDownhGcCUL6qJBqQLPRdOkS1udhGrQK/W0hbnIi7HpemBz+gkN67O2/NOKAdQKV2dqvzS+3W8/oXe4yFQB5bo8CJjuLm4oC3be/m1riA0DAg6NGgTT4OYGFT7fU2REnMj1yYir49Nj5C5oLzOtWw1oabCWh2PKIX6Ob4hKVGDvpJpXlxxPUOlINM2KCwRiPFp8+d6e+rLetHT5pgZ5Rz7l+vmuL3PZsELlWstEsSqmUpljc8DGBJvT4A7bbPcC5SkoUe+WIOGX6tZRgZ7WV971CrO1xkmzHh+7FytIAy061PKVC91rtIvj6YELWWXT0GN0FCb2bQmVe5VsjEpTvdSVirdzX/M+4LO4Pox0CIOaJ8aVdLhTgyLrEHTvqAqzmjgn76k9XlG1GI6GdP3ShxvotGqnOFegKMv05wFWXgxENLcfyS9xMHmtjKEeYmVEx+0mDVh/0JMn2/Md75JXysRKLHe0KJtzXgFJQR7jd4uDukZrGT3auSD32mFPOkZnLCjqJeqLnwbTsEkZqnA4TAYsLQnXIc/J2KZ2Nhu8fpH/Zx4hs2gJ73R9iHWkkSK0muUzh/AP8uNs3GQo0ru6DjIkvaVCse/0/vs2rQdUK7l6Dh5dH3ASzKwjtUaennNJj2nZJW+n+skusvg8481GMu/iN/6cBkQXRWR/Meiz46vYD67xK7KAdnsp9mZmSMGkkan9ulUhYFwCHs9FxYPFhauWG21fCL6dVFpHvtYgPnSxeXQqfDknivJNca985yDCk/YfwryhMmRgM3ddP+bjJ9OhLbPvOB8vKK/w/1LWHOq8Qvorf4g9PYvwFzM7XPZnWwd/zhUbN2p9mrDpFKsFEcYuu0F67sajY6OXbUxTfXhdZV6X1TObvCBKfho7weu/proXzyS99/M9X1iel9MJqAkvUcu9+GpX4K8EKFbN0kqAUQ+eZHQfkPuJCX7m9Iv4b8NVVs4i1cfLH6dH+zwpb1CzjmTy/mLuGohP8300txVouOVKaJGgLXv
k3uBRGxWiOUPt7eR+ZvzKwVDAmmgCCatDygHzxgHkzELYpd0KetQSQ+mFP6jmnuWzfBKXlaQlPVJgETKRnR0590BnBX4pxHcJ8ApDElr76iSRmrHbgg22aioEhfX77+QXmd1E2kOiH3XWhk7yGe/TohpD8mwbYgO9UZ6kz7OXDKyeI1dPxHmjMrnXOgdqkYd9VZJgAT3PSW9REaadWU2oudE8J5+Z0t1J6ILd8jmKHm78fZT5PdW7ru2aq/8EUfnfgkBpiIvU8RdRDWdzvD/xDz/JTQW7y5sqG78O2hLO+TgKU1q0HiQLJOesb6dHtTGwTDPTShbUTK8rfgiM1m/Mep3jW0tf6ERs0O+J/fDmvv4j9Q6967Pb9I5pHyg/SnQhaM2+mcjZ8+jHzXMrHdF39rOOaD/JfwJ7WzVNPu3JfrUL7MOZA2wSK2K3wKkDUS5YGXcQzbsz+OeCl2KA36CMSS6jkPlJGbFxOLu9krnFghHfUxR50nU+3N2+NavbpkhSYEJpQLjMqqDf993+ZKXx6RvOPQKcOFfRYP/LalDz2YHEWR/CJd4GDEw2mB7KJdCqj0G+O4+23n0SVEbIb3LwHIMiBhonkj33/OQ1z3YlWl8PaMICgL8lvN3jL1l6y+eR+wXV9pcSKxgOrTo6Xp3/29ZUcSf/I/HHKdfd2XhO3A6ouw9wFX43+lcOnznOoIqyCyc+fJNmNSyfGJ431doBU/BMqTMfNng99/Tl7/tIHNHFgA4/XddoP5rhk3ayElGEO8vxB84zFseDKzOsofwXJSQik01UIqalXJpg7SxGS4a98NhDMeAUub7NyARUZCwtGr9i+xmRv51PHm4w772+bHmV92NtAuoTeepHtvJsEfsFMkbrZujyYGhQT9vPvcs3pNym4euqdw6ExlVHCh1ZZ/xr6A0bBzIPWexbdB+JqG3Kvp2ShjHz1FMANY/1Fova37XFEn34XyqcEn25yhBgLiTV+xCefNHwir+nuqq3JkgAZ+foxAAu49L/P1u0UUiY9drIkf9qA/0TpLNwpTiEKeq/XqLr+NfKDtn5lO3edxgsJuTSZGKNZeYkE3wGh/TGql3mMYmXnLChQ4m+jKsZ8D1juo0Io4BllSqzhjiEfB3W6Vk/8LWa+nWb1K0m+Qban++uVKlcSBb+SbLTtqrk6rNxvLEiP97/uE+PUIeeRZQitB+zCouI5TfihxhyHFE9qlUlvrSp1oAZNcDXkL8bBKIX4k7B3LxidX0D0rjnwlLGN3jZS+VmmxCxxPtkX0OGurRSjMy5VTvWVAyQJSEPmJQzGuvYKHKxd0+78K9n35I/RWMOycJf3gk9m8idOeXna8BvUVDullTXFZ/BT3xv+Xoq95LMa0+NswYCwW7Pu2doIs5jp61dUkbMq9XLYf9A6CGnaOWN762leZgcCLgKmuW4u/Pcjp7tRoOYL/HxNnnL2Ji9H/1O/hPoyCeoV3RKjC6yxzAx1O5FPVOgAjEsl2DBQwmLuUQYowBtMtEvoikxCw+m93a6Ere9AqTbHrAyfw7fzdVaqFlmXykRlZvq7E0lECQUgxMMx4SV8N941P2Cxjp9p9JA+o/rIdsaKlmrZDj2WdFCexRX+zIHLNumZex6sZgPgvPhFF6Q/pMp0lK02ox29CVrafNwn1kNk1vZZ8IXDwsok+9E4nESbHAXAS2TM5rwnEzkXId1E9Kwy+0Rug+2WUp5ZENJl3HdEQriYhhsbsREY88/oGuV2ct0Ofl+mylVSG9rD0d9bUpqa6aDOGqmO6WTJjbU571WB37Da32h9e6eKZcxCzqNUtbQIQdDmXrwWFNYpzx2nQS725X3gdhq0HVuLyNEKtouHDdUO/sLNK7TXEOZhcdC7saQbTMfxCrjc1TXM9xNMayDvyW8I+25FIec6q7sTw9OmVIZ2XUZ9SmHnualHtzEGv/JmHxG4ecuNAx4HTnOUwyDw/z+5OFBi+RCViwjLfoZkKo4TOPCG4PE/G1b8mR
2cIjhxjNVhHB9gQDa5lITlsa+OuVVErxUx7Xyep4ogZk2Atpdqdw8TXm6MNbUHEgYXI3YPrj3QFxw/nbqVV2DgjjTn5tcWV8pHBGRTkjacLiPtPVG18rqOeuwDgVCGX+LPw5RjNurY320xogUXWTh+NudFd8BlNu2UTU0pZlKAZ5RVrRDp0/zMjSxMMeGjfdUZmb65GlPyMpknhntscAylXx2ItJcMz0LZB7UrFWxTTLP1f4BnYelRoZ89d9yrhiVlwuNr2JKlApBo0SjnrvUmY7NxycIBrGzLVLLAy+Hacf0VZfL7zZ2odSCS2JUtPDLx/k3mZLCsJZNcMjLXSovNI3sHQLTmnaixVatdfILfCM+dydvN0bZxl72nHIVkfY2jdzt7oT6Vi0NVtzdi4ps8LFWxW1rgIVJF6P3lshq6IJXMmziwGzShINtiO5A5Em78Edm+OKxESnxaCuC1Yyx4DPX/b4O62jrQ2uxFt7DXi5Lix9ktNjPnkelZES1wrmEVKNoWbXPZtdl96igkTxKWdexM22uOlq4IuFzrvcLJEoR5pR6U+YUSnycFf/txv9gSBovFWi5+ZdY/hb7058JKUBntblVN2C680cIPw2oSFstUHtXrehheJ7sGrj3dvS0m+OVwUDYYenYLaxXvK65/se7B7MZFs+oq2Vy6L0SGegHqAzHZXffY6zx6xYJvoeA4C4QKCFvikY5tj9fnPKhiPmozxXiaPZ9AqlMu2NPjh+XbFnv5MtOhU3rrrGqoUvHohWEqILyXy1jJ1d7bBobBduvbHsdxpafiiYroEIHb5oBfaIBEYOUexhwkIv9wYyXreHsUFmRTmOAZHT8YnPqKkqjfPvDuqgRJriPiwS6AOatwMzAtS3eHta7Oluegtti5t4Cw9SiqTsdO7dkR+pqZyzvEit4kmC+l6TuoSawGIJv8mfeSj+nBdKEQpBrlMwB2EUhW0nkk1/H9Vmsh7nGvAAIyQdsOI1ppuKnTZMRBnpNTIUqHl9nBEvpSXq508oulb08miwKeyxxHz5/qGxUlN3aSWIe+6M4JcmnSU9Fqfa3BH6Pe88rhyXXu5NvYQ3BVOklN92/ERqaIX6TB7ec1w7G8BumkE6qhme5PRcMVSAf067TXkTlFmrHDx/TeRlINWX+Zod6TKY/WFgYQPHNzdnXP/e/j2Mb3/p3UPNoNCzDkV7+ENGmtOoje7UALkrprcaB5AqOARLitil7Vr6oce2tL0vDS+R+SQ6a+qDSceAgQhfn9Irv0Pp2kWTQsGQNJdbCdFEPp7IbP1jAMRTTr5+Igh+KU9Mz+cAzXc0ueNH8hofJH8PcbKykLvMT9KCGcUmPWVKg6v31xTDqr/kKKujWBczE78JeQ4nBHjbk7ILgDxJC8iqi7zS661AHE7ZJAmh8j6ZZ2CHgQHLgmUOUZnJLzsGLLTi0bZCRa2fKWloNOrNAH7lWtwnNO+90yRWdsNFzRbllFUusTjkdqJExghF7crWyupwkQbbZUfa7hnNrHADhD6kYkO/WXEcFZejLuQlNMMAJEGhPLyQ7pvRCq24tKwmHqs3rJhYKYGAFH8deQbliBJeYeteXXLBTfXox+jl7ksWFx/d0VfgJN/ceWvs7NLKoMd6DQcjxeahSaDzGohiKA+QcFL4dAe4WXRpfDPKwiVPuM3ZHVnxfoJ7tecJbti8z0F+Ef+SvVvog3nU5MDHZ+7X5nPv99/WvdT0mINaUm1uDMZAc+t/0tb8xBDRu9T0ylagqcPYNTCSUPumj9ERcrTFeK69TRW1zQWn7fs0wJRLc/oxA9FtFkygDp+NxUB4FUp8wSKUNA4T+X1xFTGc/nOnz5+PATON5oDzoab6dRNtfuLJNSE3QwzRle1kp0w7Pc6QeIY8OATS09LcXrNK1W/m8+P7E1s8MFVIuokIWNfOUk6t8M6VUKm5fUMWFqSPWMN9dUg1WWXt1IA55PdDYA7pPMM6li9nKTnTsIJoPXN0TK2lmSG2
15SDv7pp91VHp+NtYEu7Esi+oSVcf4LH5RvzMnQpoECSTZ9UafYYsyr63UltHDOFXretzKbz0n4Typ+r1RER/TjswOd8rC6pcklgoHcRbNxUpTyCIblrqcuc+kKKw4egsta+OmO5ViakJ/souVkSKvs3RIZzV8yLCcKqI6/DuVJLghuf5ZWFpD02vSC/WabNuh5sp4fl91ACsUZgqg/q54pG25oln8dpvAXTeOcvY7BT7SghWY641BJNB+Cu+kABn21hcvttqsj7Mt70TVw+rJGSsUsafewUHL7f2R0CyhkaQ9vkd6qH3sAmuhomT4MlA12bAE2qs46CZYwED1++puvG204M+fHcaby84z2bEIiC1KFEPjM51hxq38gxrPp64es8x5vf9GvGqe0lfOyj7dRSU9p+eD5bJcl6V7OgTNPf2HlKWcONnk9y91V0QgfONjWo9zNznIGUyp2QuOZ7Ed1olXv8EHBcyOU94avfcQ9L4JMW8IHnrx2cTMtzn8vhenO0UjcZjsJwGiLHgKKZKn7tpF0qDUQpjs4LLKSt0f/4Rz7c5F4YdchHJMwYuMdpIKQj1SQux/U80T0622n7MZsmeXNtX4Q16RZA/tb/rEX/mcBDdi18kjlymnWy9Xaqf3ZaeJsGbJc6AtNyEKJ8cymmV6d++Ui3lAcENlFNUKkcIO3SyKPsu55OqhXe8NTbs3N742hlWu8OnO/Uipg7Xbx33t5ohZeTfDtZsCXLamnhZfSDI7cXX4WTJS/Z2NTb2DZ09CjtUsTC92QtHtOuxL5dNphxaNe3vDyvlrwArLXSVlU5rP8yzsW+p11WKBAItE3Gee+ors9TFqYzBWAgIAzmwcAcrJBeLls/lQIjb82cfRDgsSA5Xp2Khv4Y+XDBE+25cyBdr8vCFplqY/aplZej+ae2n5Azia2yAQqblZKVtqT0o9TCW4/6OVmZv8Q6XQ03EFGEMQRmySaQGaIYsNudOceAgxkaBzRucgBnihtzQcOcWnqW5whXe5B+2Ay8jbLfrjqmUbDbWEgsUAwq11tdSe8v+p6zOoVxu/EZ1IgFualqDst0N+flVarkgtrLg4fDIkJV0tQjAz8C5HWvtS21rYgNH6XfBwAmAf+rF9fxyP8DVm+z1g== \ No newline at end of file diff --git a/docs/docs/how-tos/streaming-subgraphs.ipynb b/docs/docs/how-tos/streaming-subgraphs.ipynb index 4bf593ffee..2ef34464a6 100644 --- a/docs/docs/how-tos/streaming-subgraphs.ipynb +++ b/docs/docs/how-tos/streaming-subgraphs.ipynb @@ -6,21 +6,28 @@ "source": [ "# How to stream from subgraphs\n", "\n", - "If you have created a graph with subgraphs you may wish to stream things occurring inside those subgraphs (or you may not!). This guide will walk through how you can control the information that is streamed back from subgraphs.\n", + "!!! 
info \"Prerequisites\"\n", + "\n", + " This guide assumes familiarity with the following:\n", + " \n", + " - [Subgraphs](../..//concepts/low_level/#subgraphs)\n", + " - [Chat Models](https://python.langchain.com/docs/concepts/chat_models/)\n", + "\n", + "If you have created a graph with [subgraphs](../subgraph), you may wish to stream outputs from those subgraphs. To do so, you can specify `subgraphs=True` in parent graph's `.stream()` method:\n", + "\n", + "\n", + "```python\n", + "for chunk in parent_graph.stream(\n", + " {\"foo\": \"foo\"},\n", + " # highlight-next-line\n", + " subgraphs=True\n", + "):\n", + " print(chunk)\n", + "```\n", "\n", "## Setup\n", "\n", - "First let's install the required packages and set our API keys" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph langchain-openai" + "First let's install the required packages" ] }, { @@ -29,16 +36,8 @@ "metadata": {}, "outputs": [], "source": [ - "import getpass\n", - "import os\n", - "\n", - "\n", - "def _set_env(var: str):\n", - " if not os.environ.get(var):\n", - " os.environ[var] = getpass.getpass(f\"{var}: \")\n", - "\n", - "\n", - "_set_env(\"OPENAI_API_KEY\")" + "%%capture --no-stderr\n", + "%pip install -U langgraph" ] }, { @@ -57,315 +56,137 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Define subgraphs\n", - "\n", - "We are going to use the same subgraph from [this how-to](https://langchain-ai.github.io/langgraph/how-tos/subgraph/)." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Optional, Annotated\n", - "from typing_extensions import TypedDict\n", - "from langgraph.checkpoint.memory import MemorySaver\n", - "from langgraph.graph import StateGraph, START, END\n", - "\n", - "\n", - "# The structure of the logs\n", - "class Logs(TypedDict):\n", - " id: str\n", - " question: str\n", - " answer: str\n", - " grade: Optional[int]\n", - " feedback: Optional[str]\n", - "\n", - "\n", - "# Define custom reducer (see more on this in the \"Custom reducer\" section below)\n", - "def add_logs(left: list[Logs], right: list[Logs]) -> list[Logs]:\n", - " if not left:\n", - " left = []\n", - "\n", - " if not right:\n", - " right = []\n", - "\n", - " logs = left.copy()\n", - " left_id_to_idx = {log[\"id\"]: idx for idx, log in enumerate(logs)}\n", - " # update if the new logs are already in the state, otherwise append\n", - " for log in right:\n", - " idx = left_id_to_idx.get(log[\"id\"])\n", - " if idx is not None:\n", - " logs[idx] = log\n", - " else:\n", - " logs.append(log)\n", - " return logs\n", - "\n", - "\n", - "# Failure Analysis Subgraph\n", - "class FailureAnalysisState(TypedDict):\n", - " # keys shared with the parent graph (EntryGraphState)\n", - " logs: Annotated[list[Logs], add_logs]\n", - " failure_report: str\n", - " # subgraph key\n", - " failures: list[Logs]\n", - "\n", - "\n", - "def get_failures(state: FailureAnalysisState):\n", - " failures = [log for log in state[\"logs\"] if log[\"grade\"] == 0]\n", - " return {\"failures\": failures}\n", - "\n", - "\n", - "def generate_summary(state: FailureAnalysisState):\n", - " failures = state[\"failures\"]\n", - " # NOTE: you can implement custom summarization logic here\n", - " failure_ids = [log[\"id\"] for log in failures]\n", - " fa_summary = f\"Poor quality of retrieval for document IDs: {', '.join(failure_ids)}\"\n", - " return {\"failure_report\": 
fa_summary}\n", - "\n", - "\n", - "fa_builder = StateGraph(FailureAnalysisState)\n", - "fa_builder.add_node(\"get_failures\", get_failures)\n", - "fa_builder.add_node(\"generate_summary\", generate_summary)\n", - "fa_builder.add_edge(START, \"get_failures\")\n", - "fa_builder.add_edge(\"get_failures\", \"generate_summary\")\n", - "fa_builder.add_edge(\"generate_summary\", END)\n", - "\n", - "\n", - "# Summarization subgraph\n", - "class QuestionSummarizationState(TypedDict):\n", - " # keys that are shared with the parent graph (EntryGraphState)\n", - " summary_report: str\n", - " logs: Annotated[list[Logs], add_logs]\n", - " # subgraph keys\n", - " summary: str\n", - "\n", - "\n", - "def generate_summary(state: QuestionSummarizationState):\n", - " docs = state[\"logs\"]\n", - " # NOTE: you can implement custom summarization logic here\n", - " summary = \"Questions focused on usage of ChatOllama and Chroma vector store.\"\n", - " return {\"summary\": summary}\n", - "\n", - "\n", - "def send_to_slack(state: QuestionSummarizationState):\n", - " summary = state[\"summary\"]\n", - " # NOTE: you can implement custom logic here, for example sending the summary generated in the previous step to Slack\n", - " return {\"summary_report\": summary}\n", - "\n", - "\n", - "qs_builder = StateGraph(QuestionSummarizationState)\n", - "qs_builder.add_node(\"generate_summary\", generate_summary)\n", - "qs_builder.add_node(\"send_to_slack\", send_to_slack)\n", - "qs_builder.add_edge(START, \"generate_summary\")\n", - "qs_builder.add_edge(\"generate_summary\", \"send_to_slack\")\n", - "qs_builder.add_edge(\"send_to_slack\", END)" + "## Example" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Define parent graph" + "Let's define a simple example:" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, - "outputs": [ - { - "data": { - "image/jpeg": 
"/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAHiAf0DASIAAhEBAxEB/8QAHQABAQEAAwEBAQEAAAAAAAAAAAYFBAcIAwIBCf/EAFkQAAAGAQEDBA0HCAQNAwQDAAABAgMEBQYRBxIhEzFWdAgUFhciNkFRkpSVstMVMlRVYdHSIzVSU3GztNREYnWBJCYzNDdCQ3JzgpGTwQlFoSWDovGEsfD/xAAaAQEAAwEBAQAAAAAAAAAAAAAAAQIDBAUG/8QAPBEBAAEBAwYMBQMEAwEBAAAAAAECAxESBAUVMVHRFCEyM0FTYXGRkqHBE1JysdI0wvAjQmKBIrLh4vH/2gAMAwEAAhEDEQA/AP8AVMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH5WtLSDWtRISRampR6EQyby6ehusQK9lMq1kkZtoX/k2kFzuuH5Elw4FxUZkReUy4ScBrpqievN7IpWpq37EiW0g/MhnTcSReQ9DV51GfEbU0REYq5uj1TdtaS8np0KNKraCky8hyUF/wCR/O6ql+uIHrKPvH8TidIhJJTTV6UlwIiio0L/AOB/e5Wl+p4HqyPuFv6Pb6J4juqpfriB6yj7w7qqX64geso+8O5Wl+p4HqyPuDuVpfqeB6sj7g/o9vocR3VUv1xA9ZR94d1VL9cQPWUfeHcrS/U8D1ZH3B3K0v1PA9WR9wf0e30OI7qqX64geso+8faLeVs5zcjWEWQv9Fp5Kj/6EY+PcrS/U8D1ZH3D4ycJx6Yg0v0Va6kyMvDiNn/4D+j2+iOJtAJdylmYqg5FIp+bDQRG5TvO7+qSLjyC1cUr8yVK3D008DU1Fv11hHtYTMuK5ysd5O8hWhkf7DI+JGXMZHxIyMj4ilVF0YqZvj+ayYckAAZIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAExhmlm/c3a91TsuY5FbUWuqWI61NJTx8m+Tq//uGKcTOz9PatRNgK1J2FYy2lkZacFPKdR/1bcQev2imG9vztUdHR3dHomdYMzJclq8OoJ93dTmq2qgNKfkynz0Q2gucz+4uJnwIaYkNrtVU3mzTIoF5ST8jqZERTcmsq2zXKfSZlwaSRkZrLgZaGR6lwGCE
HnfZVYtjezfuupkTbuP8AK8WoUyqumMLbcdW3vGpCmd8t1te+WqSJZ7qSPVadaXIuyDwbEqeos7ezmwY1q049FQ5TzTeNtsyJa1skybjaUmZamtKSLUvOOibCNtCy7YlmER2tybIKymvqqbj538Dta7nRGJEd+QhbRkk1qRuLJClJJTmnlMVO0TMLzMMsxyW7U7RYOz+TVvrTCx6BJh2Dtkl/cS3K3N11lvky3kGo0IM1aqVoREA7WyHbtguLw8dlz79vtfImVv1C4jDsnt5KUoUfJE0hRqPRxGiedWvAjPUTVP2S1Fc7Y14M3Bs0IXWQp0aaqqmlyjkg1mSFpNgiZSSCQe+syLeUpJ6KQoi6o2J4Lf1h9jixZ43aQnscjZBFsO24i9ILhkSG99em6RLLXcVroovmmY7Js5FhhPZRSbiRj11Y0uRY/Aq49jVQVymY8hqU+a0vmgj5JO6+hW+rROhK46loA7wAAABMUWlVmF3VI0TGfbbs2UFr4KnFLS8X96kEvh5XFCnExDLtzaNZPJ1NuHXMxlHpw5Ra1rMtfsSSD/5iHRZcmuJ1Xe8JjpU4AA50AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACctYj9JbrvILC5TTzaWrCI0Wri0J13HW0/wCstO8ZGnnUnTTU0JSr8XmN4ltXomGbetq8pqUvcs21MZRJaS6klJ3t1RGRKIlKLzlqZCmGFZ4XV2ctcwkPwZy/nSq+QuO4vyeEaDIl/wDMRjeKqa4iK9e3eninWlC7GzZOWumzfFi159Kljj/+I1MZ2LYBhdu3a0GF0VLZNpUlEuBXtMupJRaKIlJSRlqXAcw8IfIiJOUXySLycu0f/wAm2Zh3EyOlV9/3mfhCfh2fz+kl0bVQAl+4mR0qvv8AvM/CEntOrrbEsXZsK/Kbg5CrWshny7rJp5N+eww5/sy47jqtPt04HzB8Oz+f0kuja7UHylxWZ8V6NJaQ/HeQptxpxO8laTLQ0mR85GR6aCc7iZHSq+/7zPwg7iZHSq+/7zPwg+HZ/P6SXRtT5djZsnSZGWzfFiMuYyqGPwgXY2bJ0mRls3xYjLiR/JDH4RQdxMjpVff95n4QFgvKHpJyG+ktnwNBzeR1L9rSUKL9pHqGCz+f0kuja0LnI2691MKI2U+4dL8jCQrQy15luGRHybZeVZl9iSUoySf0x6l+RICm3HSkTH3FSJUgk7vKvK+coi1PQuBJSWp6JSktT0H0p6GvoGFNV8VuMlZ7y1J4rcPTTVaj4qPTymZmNAVqqpiMNGr7/wA/nY7gAAYoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB19t0NJYHH3jMi+X6Pm8/ytE08peX/APR8w7BHX23PXuDj6Gkv/r9H84iMvztE8/DX/wCfNx0AdggAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6927ER4FG1UlH+MFFxUWpfnaJw5j5+b7h2EOvNu+ncFG1MyLugouZOv/u0QB2GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP4pRJSalGRERamZ+QRaswu7YikUlbCOtVxZkWElba3k+RZIS2eiT5yMz1MvIQ2s7Kq1vwpuvWoCI+Xcw+gUfrb3ww+Xcw+gUfrb3wxtwWvbHjBctwER8u5h9Ao/W3vhh8u5h9Ao/W3vhhwWvbHjBctx5G7OPspHdicyjxp/DnraFYKg27VqU0mkcpGnIeWxuG0rjoyjwtdS5Uj04cfQXy7mH0Cj9b
e+GOqeyJ2JWHZH4pX0t5GqISoM1EtiZHkOKdQRcHGy1a5lp4H9pJPjpoHBa9seMFzsjYFtTsNtOzKuzCfjasXRZKWuLCXL7YWtgj0S4o9xG7vGStC0PgST148OxR1/VysmpKyJXQamgiwYjKI7DDcp4kttoSSUpL8nzEREQ5Py7mH0Cj9be+GHBa9seMFy3ARHy7mH0Cj9be+GHy7mH0Cj9be+GHBa9seMFy3ARHy7mH0Cj9be+GHy7mH0Cj9be+GHBa9seMFy3ATlDlEiXP+TbaG3AsFIU6ybDxusvoIyJW6o0pMlFqWqTLmPgatD0oxz10VWc3VF1wAAM0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAzclUacdtTI9DKI6ZGX+4YmsVIixinIiIi7TZ4F/uEKTJvFu26o77hibxbxYqOps+4Q9Gx5me/wBk9DUAAFkADjWdlGpq2XYTHORiRWVvvObpq3UJSalHoRGZ6ER8C4j5UN5Cyejrrisf7ZrbCM3LjPbikco04kloVuqIjLVJkehkR+chA5wAODaXlfSHDKwmsQzmSExIxPuEk3nlEZpbRr85RklR6Fx0Iz8gkc4AGOrLqlGXt4ucvS9cgqskxeTXxjpcS2pe/pu/PWktNdeOumggbAAAkAAAGNOPTNsT08r0kj/Z2us//BC8EFP8dsS/48n+HWL0Y5V/Z3e8pnoAABxIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABmZN4t23VHfcMTeLeLFR1Nn3CFJk3i3bdUd9wxN4t4sVHU2fcIejY8zPf7J6HOmIediPojukzIUhRNuKTvEhWnAzLy6H5B48f2gZRsU2b5vDubHIj2owqhmUp23s+362Q0uWiOuwiGZGTSUm7qbZpTu6J1SotTP2M8yiQy404W82tJpUXnI+BiCxbYHgOGNWTdZjrXJ2MTtCSmY+7LJcbj+QLllr3WuJ+AnRP2CKomdSHUUTB9peNwb2XPmLPF3sesET2LDLHrtx902DNp1knIrXJGR7xKJKt0yXwSWhDDcvsqt8c2EYLj7jjESyw5qyfJi7XUOTFMx46UtIktsurLdJalmlBEai01UREZH3/hGxDCtnT0l2hpjjLkR+1F9sS35SSY115JJOrUSEakXgp0LhzDiyOx82fycSr8aXjyU09dIVJgttyn0OxHFGZqNl5KycbI9T4JURacNNBXDI6fdb2k0r2E7P8nyN+oiZLkExCLWvtFSpzUBqJyyIhzFMtmbi3UrLlN0l7pEWpnz87bnsqh1cTZVWLyLKJrK83jtlIlXsg5DaXY7upE6SiVqRtluqMzUnfWRGRKMh27J2I4RMwdnEH6Fp6gZfOU1HcedU429vGvlUvGrlCc3lKPfJW94R8eI+PeGwZWHP4sukU9TPy0z1oemyFvHJTu7rxPqcN0lkSUkSiXqRFpzCcMi2rYKKuuiw23Hnm47SGUuSXlPOqJJERGtajNS1HpxUZmZnxMdO5FNcreyjRKZZ7YfY2fzXW2S53FJmsGSS/aehf3iqXimcUe5X4reY5X4/GQhqJGtamXOkoSSS133+3Ums97U9TLXQyI9dNT5sPZ23cWNFfZc3W2mWUy3Th2dXHehJbQstDRuG8s1EZc5KUpOvHQjEzxjoHG7i/pcJ2QbRzzW6uLrLrmujWddImm5XvNzDUTjTUb5jRs66kaND/JK3tdTGXj9zkOVZnjyFZFlb+fNZo43kOPIkSG6yHWNPOKT4CdGktk0mOpKtdXDXoe+SjIvQ1FsDwHGsobyGtx1qNZtOuPMHy7q2Y7jmvKLZYUs2mlK1PU0JSfE/OOq2+x2y9naIi0rpFRjFem7OzXPqLq1N51k3zdWycJxw4xG4RmlZlqnwlGSS5hS6Rz9kjl1hu1h6iz20yORk9sU
9+ukuWRv0tnHS6SyNlj+jutNqQk0aEWhqPVWpaehRDYpsRwrCckdv6el7XtlpcST7sp58micVvOE0lxaktEoy1PcJOouRpTEwMWf47Yl/x5P8OsXogp/jtiX/AB5P8OsXozyr+zu95TPQAADiQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD8uOIZbU44pKEJI1KUo9CIi5zMxAv7aKixcXHxKHNzuWlZtq+QUoXFbUXAyXLWpLCTI+dO+a+B6JMy0AVuTeLdt1R33DE3i3ixUdTZ9whnu45nuatLReXcTEKx0jSuux1JSpSknzpXLfRukRlwMkMkZcdHOYx92Il9jEZmtRSvXkaMhLTEuJIZStaCIiTyiXVo0XpwPQzI9NeGu6XoZPMTRNF90338c3fdaOOLm6AxPla+6GWvrUL44fK190MtfWoXxxvg/yjzRvLm2AxPla+6GWvrUL44fK190MtfWoXxwwf5R5o3lzbAYnytfdDLX1qF8cPla+6GWvrUL44YP8AKPNG8ubYDE+Vr7oZa+tQvjh8rX3Qy19ahfHDB/lHmjeXNsBifK190MtfWoXxw+Vr7oZa+tQvjhg/yjzRvLm2AxPla+6GWvrUL44fK190MtfWoXxwwf5R5o3lz+z/AB2xL/jyf4dYvR13Kwy7y55ia/YTMPfh7xwjgLZefJauClOktK2zSZeDuaHwMz3iPTT+fLW0LEOFrSw82r0/03HjKHNIvOqK+5uK0LnND2p8d1viRDkymqJmmInVF3rM+6JdigJXGdp2NZZPVWwrJLNyhBOOVE5tUWc2nTXVTDhJXp/W004HoYqhxoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAT2WZ7S4YlhFjJWqbJ17WrojS5EuSZaa8mygjWoi1LUyLROupmRcROkWd50kzNRbPahZloSeSl2ziftPwmI5/s5Y9PKk+YKfK84ocHiNSL21j1yX1cmw24rV2Qv9BpstVuK/qoIz+wS/dbmuYeDjOOIx2ArmuMrSpKzL9JuC2onFF9jy2FF5jG7imzbH8OkOTIMI3rZ4t1+3nuqlTny8y33DUs0+ZGu6XkIi4CnAdftbHK+1fKVl1lPzWRqZkxarJMBH2Jhtklk9PIpxK1l+lxPW9YYbjMtsstpaabSSENoSRJSki0IiIuYiH7AAAAAAAAAAAAAAAAAAAAAAAAAAAAGPk2H0mZQkxbyqiWrCDNTZSmiWbajLTeQZ8UK/rJMjLziT7gcmxNJqxHKnpEZJ6lT5Sa5zBF+i3J17YQZ8OK1PEWnBA7EAB14W15GOETed0snDTI9DsnFlKqj/rdtoIibT9r6WTPzC+iS2J8ZqTGebkR3Uktt1pRKQtJ8xkZcDI/OPoZEZGRlqR+QQMvY9X10l2dh86RhFg4o3FpqkpOE+oz1M3YiiNpRmfOtJIcP9MgF+A68LPr7DlkzmtGaoRHoWRY+25Iiaed5ji9H/b+VbSRaqdTzC5q7WFeV0efXTI9hAkIJxmVFdS606k+ZSVJMyMvtIBygAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB+XHEMtrccWlDaCNSlKPQiIuczMQKslvtoLpNYmaanHjPRzJpLe84+RHoZQmFJ0WR6Ho+54HzTQh5KtSbvfVtZjT28eF17yoqmTIyTbyEHo5vceMZtRKQaT4OrJZH4CS5TsABOYlgFLhhyHoEdbtjK07btJrqpEyUZc3KPLM1GRcdE6klOuiSSXAUYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACGstmSK+fLuMPmFjF1IUbjzaEGuBMXqRmciMRkRqPyuINDnNqoyLQ7kAEnjWcrnWKaS/rzoMl3DcKIazdjyklzrjP7qSdSXOadEuIIyNaEkp
JqrBlZJjUHKq3tOclZElZOsvsLNt6O6XzXG1lxSotT4l5DMj1IzI8fC8imuzp+OXqkqvqxKFnJQ3ybc+MvXk5KC5i1MlIWkvmrSfDdUg1BWgAAAAAAAAAAAAAAAAAAAAAAAAAAA/DrqGGluOKJDaCNSlKPQiIuczGAW0XFjLxhrPWkfeMa7azsucqiO+bkxEzqUQCd74mL9Iaz1pH3h3xMX6Q1nrSPvGfC8n6ynxhOGdiiATvfExfpDWetI+8O+Ji/SGs9aR94cLyfrKfGDDOxRAJ3viYv0hrPWkfeHfExfpDWetI+8OF5P1lPjBhnYogE73xMX6Q1nrSPvDviYv0hrPWkfeHC8n6ynxgwzsUQCd74mL9Iaz1pH3h3xMX6Q1nrSPvDheT9ZT4wYZ2KIBO98TF+kNZ60j7w74mL9Iaz1pH3hwvJ+sp8YMM7FEI/a5kEzGtnV1LrHUMWzraIMB1wtUolSHEsMKMvKROuoPTy8w53fExfpDWetI+8Qu2HaDjrtBRoZv61WuR05rIpCVeCmewoz0I/Jprr5OcIyrJ54otI8YMM7HZWN4/CxPH62lrWuRgV8dEZhBnqZIQkiLU/KfDiflPUxpCd74mL9Iaz1pH3h3xMX6Q1nrSPvDheT9ZT4wYZ2KIBO98TF+kNZ60j7w74mL9Iaz1pH3hwvJ+sp8YMM7FEAne+Ji/SGs9aR94d8TF+kNZ60j7w4Xk/WU+MGGdiiATvfExfpDWetI+8O+Ji/SGs9aR94cLyfrKfGDDOxRAJ3viYv0hrPWkfeHfExfpDWetI+8OF5P1lPjBhnYogE73xMX6Q1nrSPvDviYv0hrPWkfeHC8n6ynxgwzsUQCd74mL9Iaz1pH3h3xMX6Q1nrSPvDheT9ZT4wYZ2KIBk1eWUt3JONX20ObIJJrNph9K1bpGRGehHzcS/wCo1hvRaUWkYqJiY7EXXawAE73xMX6Q1nrSPvFbS1s7K74lURftm4iJnUogE73xMX6Q1nrSPvDviYv0hrPWkfeMuF5P1lPjCcM7FEAne+Ji/SGs9aR94d8TF+kNZ60j7w4Xk/WU+MGGdiiATvfExfpDWetI+8O+Ji/SGs9aR94cLyfrKfGDDOxRAJ3viYv0hrPWkfeHfExfpDWetI+8OF5P1lPjBhnYogE73xMX6Q1nrSPvDviYv0hrPWkfeHC8n6ynxgwzsUQCd74mL9Iaz1pH3h3xMX6Q1nrSPvDheT9ZT4wYZ2KIdf7UCKit8Pytvcbdg2jVXIcUZka4s5xEdTfDycucVzj+qIb3fExfpDWetI+8Qe3HP8cd2aWBMX1ct5MmEtJJkJUeqZbKuYj+wIyrJ5m6LSPGDDOx28Ane+Ji/SGs9aR94d8TF+kNZ60j7w4Xk/WU+MGGdiiATvfExfpDWetI+8O+Ji/SGs9aR94cLyfrKfGDDOxRAJ3viYv0hrPWkfeHfExfpDWetI+8OF5P1lPjBhnYogE73xMX6Q1nrSPvDviYv0hrPWkfeHC8n6ynxgwzsUQCd74mL9Iaz1pH3h3xMX6Q1nrSPvDheT9ZT4wYZ2KIBO98TF+kNZ60j7w74mL9Iaz1pH3hwvJ+sp8YMM7FEAne+Ji/SGs9aR94d8TF+kNZ60j7w4Xk/WU+MGGdiiAY1dmVDby0RYNxBlyVkZpZZfSpR6FqehEfmGyNqLSi0i+iYmOzjRMXa2dkfi9adVd9wxxce/MFZ1Vr3CHKyPxetOqu+4Y4uPfmCs6q17hDxMv5+nu92lGpoAADhXAAAAAAB8Zs2PWw35ct9qLEjtqdefeWSENoSWqlKUfAiIiMzM+bQdSbMOyQpdpUbI7tL1JV4bVPrjpuH75lTijS4pslvMkRFHQvdNSDWvVSdD0IjIdvuNoebUhaSWhRGlSVFqRkfORkPHmOXJYj2IVE9Hg1bMWflEqJYWljXplx6xg7eV/hjjRlork91JJ3uCVGkz4ENqKYqjtvhEvRuT7XceqNm9jmFXc0VxAZbV2
q8q6jx4cl/wD1Ge2lKNtBqVonUz4aj4Um1piwzrJcfnRGKuNR00G4fsnJqVNbsjljURnuklKUEwZ7+8ZGR68NOPlKIqr72HZRwq+3Vf1rlY1ZQ5j0BqIiVvQVIXIabbbbbNPKNGnlEJ0UaNdTM9T7Fs2198TaTRm06VllGziI1TtcmrSYtpmYlxCFaaGpJut6p110URjWbKmImP50b0XvSpZNTmqpSVtBNVuRnXF2yjWaRNm4fI8fyngEa/B18EjPmGdT7ScRyGYcSqymls5ZRjmchDsGXV8gR7pu7qVGe4RmRb3Nr5R5pxbaRj2XZV2MVTUWBTbCtbkNT2ENq1huppnkKadMy0Q4SiV4B+F4JnppxGLSYXId/wDT5aLGa1w7CWjtmwKtYSuXKjlYb0lJEZHyhmyhRbqiMlJTu6GWhCvwYi6/p/8Adxe9b4znWN5qmQrHshqr5MdW68dZNakk0fHgrcUeh8D5/MJnbc6ljHKFxbiGkJyamNS3PmkXb7Ov7P2+T7B1hsFpcDyPaMjJsX2hzstsa2qVDcZbq4kJhDDqkmSHTjxGdVJU2RkhRmaOPAtT1suyptZlFsjdsq+uO4nw7OBIj1uhq7bcTJbUlkyIjMyWZEnTy7wphw2kUwX8S/xzaFi2YHMKhyWnuzh/5z8nT2pHIc/z9xR7vMfP5h+aXaRiWR2iK2pyiltLFbCZSYcKwZeeUypJKS4SEqM9w0mRkrTQyMj8o8hbK7epf2zYTZV2TxLt20xazhzSqqZmvgxX9xl5MNBttpNSkk26rk3FrWkkanprx2MCpa+m2R9ilYQIMaFOXcxm1yGGkoWpL1fLN4jMi1PfMiNXnMuI0mxiOn+ce4vep67aFitxkEihgZNTzryPry1ZGntOSWtOfeaJRqLTy6kMHZ7ters0hS3ZpRqJ9GQWGPxI78xKlTHIrriNW9SSZqUltS9wiMyIj4npqPJTGWRb/NNnmRTLOHVZMxnOlji9ZSNRipGnHH2FHJkE1yu+4a2941uElxTupJPQjK6rad682S7Va2uJTOb4bmtjk8OM82pC0OpmOSoxlqWhofaJaCMtSMnDEzYxEcZe9O22dY1QHYFZ5DVVx1zbb00pc1prtVDhmTanN5RbhKNKiSatCPdPTmGFabWqdFth8KmsKS7LIpC0NKbuo7azjpQs1PsIMzOQRKSlJpb4lvGf+qY86LgY5n+yKdtQy+7VhjuU5SxeU9jNiFJbiNxkqZr0SGtDSps2kLWZGZFq984j0GjTZQ5m8LYDcSaSvp3+7CdHQqsjGxFlNpizi7YYQoiUlt4y5QiPj4Wup84r8KI/nYXvS9PnWN5DbzamqyGqs7SDqUqDDmtOvx9D0PlEJUak6GZFxIuI3B5X7Gi3oMe2luYVh06vy3GWq6ZLatE1qmLOmM5KDOHKdNJcoS1LNSdSSv8AJcSMiIx6oGVpTgm5McYAAM0gAAAAAAw5Pj7S9Qm+/GFOJiT4+0vUJvvxhTj183civ6vaGdesErs/8Rcf6gx7hCqErs/8Rcf6gx7hDnzjz1n3VfelNGqW+AAOBcE3P2mYfVXTlPNyukh27ZJNcCRYsofSSjIk6tmreLU1ERcOOpecUg8APXeH0+wk8SuWo8ba6xl0V22amRD7eekHctqVJ5Q0+EhTRlurJW7umSSPjoNrOjGiZue6V5fRNwLacu6rkwqhxbVjJVLbJuEtCSWtLytdGzSlSVGStNCURnzji320LFsVXCRdZLT06p3GImfPaYOR/wAPfUW9zlzajyhtFzSnwfZ12TOK3Uo4WQ2tjPnwK5TSzdlRn4EdKHmyIvCQRoXvKLgjdPeMtBwLxinqtquZntDzGbh8W2ra35HcXTw5sewhFESlxhpUiK8e8l3lNWkmRmbm9unrqNIsb/53b0XvYNznmM45ZQa+2yKpq58/TtSLNnNMuyNT0Lk0KURr48OBGJSl7IPB7raBlGHpv66LbUCybeRJnMJ5cyZN142k7+8
ZNESicMyLdNKiPTd1HnfPYWL7CLnHJOO3CMryWPS1dYWJ5LWqkyLmGl4yYVGc3CNqQglKPQiNPgp3kloRjs7DLugxbsn9qFPdqjwLXInayRTtSWdDnNlCJt02j00VotCyVx/bziPhxEX6+LcXuyMX2t09ps+by+6saShqHJD7SJvy3Hkw1IQ8ttCykpMmzNRI13ddUmZpPikxvLzrG2qeFbryGqRVTTNMWcqa0TD5klSzJC97dV4La1cDPghR8xGPHeBu1OOUuxzIctbbTgddZ5W0+/IZ34kKauweKM48Whkkt1LyUqMtEmfOWo+sSsgXpUUqLWILBbjbEzLpIsiNusvMdoOkt1ttRaE0t5Dik8CIz1PTiLTYxfJe9j45lNLmFamxobeBd16lGgpddJRIaNRc5b6DMtS82o5NtcQKCtkWFnNjVsCOnfelS3UtNNJ86lqMiIvtMx1Fsgis1u3jbdDiNIjRO26mQTDSSSgnFwS31ERcNVbqdT8ug1+yTp8autlE5nKrteOVjcqK+i0TH5dMd9DyFMqW3uqJbe+Sd4lFu7uuplzlhhjHFPd6pWlZnuM3bdY5XZFUz27RbjcBUWc04UtSEmpaWjSo980pI1GSddCIzMQm33MaFvZnfMKvKxD8adCivtrlN7zTxvNOk2stfBXyfh7p8d3wubiOj6PaMiU9srzW9i1dTjVBllvXychqoiotbMQ5AdbZnEk/8m264rcNRmad4j8LQyETtQuazNYGcT4Z9uVUzapTbinWlJS8goLKd4kqLik9NSPTRRGRlqRkOimxuqj+dKL3s5jbJgEpmG8znGNuszHjjxnEW8dSX3S01QgyX4Si1LgXHiQ5t/tKxHFJK493lVJTvoUhCmrCxZYUk1kZoIyWoj1UST08+h6cw8zZxiVGpfZdrOng77dGw6hRRkapWVSpwlEenA98iV+0tecfapqYOQ3+2KXaQo9jJ73dKRPSmkuKIlxZil6GZcN40JM/OaS8xCnwqbr/AOdG8vemqfOsbyG1mVdVkNVZ2cItZUKHNadeYLXTVaEqM0/3kQ+ePbQsWy2fKg0eS091Nif5xGr57T7jPHTw0oUZp48OI80Y1DYxZfYyWFNUMnPViE8zYiNpbXLP5MZdJszIuO84WvHXwjM+czEfsnyCustsGxW9bvYLthLROi2tVT0bVfCpXXoa1IgqWhsl7/KI3SQ64ozNvUiLXjPwYmJmP5r3F71bebbcJq6ifKYy7GpUmPEdltx3LuMyThIXyfFal6ISbujZrPgSj0PjwHHb284NXV1WrJMxxfGreXBYmO1su/imprlGyWRErfIlp48FkWii4lwMdN7IMaqo3YX5RLbroxSpkPIXZD/JJNbqikSiI1HpqehNtkX+4nzCOzjaLs/lYnsr2bZBa01G3PxyunZHazNxLqICWG92Ihem8Tj56JPQ9Utks/8AWSEWVMzMR0SXvV0jathMNyrbfzGgYctW0PV6XLRhJzEL+YpojV+UI/IadSMY2Rbd8OxPafXYLb3MKttp0Fc1DkuYyy2R8o222ye8slco4azNKdOJIVpzDzLt6yugz1zM6mPY1lPUFirBYyzXUTMuXkqXGHFISy4tpaiabXogktESkGalbyeAsGczoMb2ibHs0yiYwzR2eCOwkXElBraXMNcN1KFL0PRZpS4Za8TMjIuIfCiIvkvd94xtJhZLdZfHbfqvkvH30MKsYtuxJ3lcnvPE8hB6x1NqJSTS4ep7pnwIc6s2lYjdUsy4rsqpJ9RDPdk2EWxZcjsH5luJUaU/3mPLO1CmdmWu31uPCdl1LGT41LuIMNo1rkV6I0RyUkkJ4q1QSlKIuciVz6ie222+O5mztXv9nqI72Is7PlQbOfWsclEfm9tIVHbIyIiW421yuplruksknpzFMWMVXf69t5e9oUOa47lUqdGpb6suJMFfJy2YExt9cdXEt1wkKM0nwPgenMY2h0YdLX432VGFs1MGNWsvYXYMLai
NJbSptuTENtOiSItE7ytPNqY7zHPVERdd0rAAAoMK58Z8Y6w9/DuCoEvc+M+MdYe/h3BUD1c28m0+r9tLOvoZ2R+L1p1V33DHFx78wVnVWvcIcrI/F606q77hji49+YKzqrXuEMMv5+nu900amgAAOFcAAAAAAAfM5DSTMjdQRlzkaiExmDy5tvT0huKbiTEPvyEtqNKnUNbhcmZlx3TNwjMiMtSTpzGZHw+99i+ha43UHoRFxgtHw9Ee5kubYtrKLW0ruv1XRf03bY6YRMxCy7aZ/XN+kQdtM/rm/SIRve+xbo1T+oNfhDvfYt0ap/UGvwjr0RZdbPlj8lcULLtpn9c36RB20z+ub9IhG977FujVP6g1+EO99i3Rqn9Qa/CGiLLrZ8sfkYoUGR1UHJ6d+tlTZUZh7d3na6e7DfLRRKLddaUlaeJcdDLUtSPgZj/PTs++xplUVrjeU4bJuLRNpJbrJrUuzkTnu2jMijr33VrWepFuc+hbidOce6+99i3Rqn9Qa/CHe+xbo1T+oNfhG1lm6zsqr4tZ8sfkiaolx9heAs7ItkmM4kqaUp+ui6SHlOb2++tSnHTI/NvrVp9mguu2mf1zfpEI3vfYt0ap/UGvwh3vsW6NU/qDX4RlOabKqb5tZ8sfknFCy7aZ/XN+kQns9xWJn+OPUsi6sKmM+ouWdqJZR3nG+O82a9DMkqI9D3dD8xkM7vfYt0ap/UGvwh3vsW6NU/qDX4REZpsom+LWfLH5GKFNTQazHqeDVVqGYlfBYRGjR21eC00hJJQkvsIiIv7hzO2mf1zfpEI3vfYt0ap/UGvwh3vsW6NU/qDX4Q0TZdbPlj8jFC1Q4lwtUKJRedJ6j9Dry3xSqoKuZZ01fEp7KIyt5qRCZSyZmlJmSV7peEg+Y0nrz8OOhld18sp8CNJJO6TzSXCT5tSI/wDyPMy3IuCxTVTVfE9l3vK0TfqcgAAeWkAAAAAAGHJ8faXqE334wpxMSfH2l6hN9+MKcevm7kV/V7Qzr1gldn/iLj/UGPcIVQldn/iLj/UGPcIc+cees+6r70po1S3wABwLgmtomA1+0zGFUVo9JYiKlRZZriKSlzfYfbfQWqkqLQ1NpI+HMZ6GR8RSiMyFtGRZWqmmflayLCblORDM9x9bi3EJ5Qv9ZKSaVok+GqtTIzJJl2ZJk85TaxRE3Xcd4ru2mf1qPSIO2mf1zfpEI5WAYupRqVjdQZmepmcFrj/+I/ne+xbo1T+oNfhHt6Isutnyx+SmKFl20z+ub9Ig7aZ/XN+kQje99i3Rqn9Qa/CHe+xbo1T+oNfhDRFl1s+WPyMULLtpn9c36RB20z+ub9IhG977FujVP6g1+EO99i3Rqn9Qa/CGiLLrZ8sfkYoauXYvW5pDYjTbGzhoZc5RKqi3kV6zPQy0UthxBqLjzGZl5RnYps7psPs1TodxfzHVNm0bdrkc2e1oZkepNvvLSSuHztNS48eJj5977FujVP6g1+EO99i3Rqn9Qa/CJ0VZ3XfFny//AEYoWXbTP65v0iHhz/1NNkUvIcfpNoFI489JrTTWWEdhwz3mFrM2V7pfouLUk/8AiJ8w9Y977FujVP6g1+EO99i3Rqn9Qa/CL2WbLKyriqLSfLH5E1RKT7FLZUWxTYpSUc1/fupBHYWanHd4+2XCTvJ4n/qJShH/ACa+UdvdtM/rm/SIRve+xbo1T+oNfhDvfYt0ap/UGvwitWarKqZqm1nyx+RihZdtM/rm/SIO2mf1zfpEI3vfYt0ap/UGvwh3vsW6NU/qDX4RXRFl1s+WPyMULLtpn9c36RB20z+ub9IhG977FujVP6g1+EO99i3Rqn9Qa/CGiLLrZ8sfkYoWXbTP65v0iDtpn9c36RCN732LdGqf1Br8Id77FujVP6g1+ENEWXWz5Y/IxQsu2mf1zfpEP0h9tZ6JcSo/MSiMRfe+xbo1T+oNfhH8PZ9jGh7uO1bZ/pNQ20KL7SUREZH9pCNEWXWz5Y/
IxQuAE5g89+TBnxJDy5K66YuIl5w9VrQSUqTvH5TJKyIz8ump6nqYox8/b2U2FpVZVa4XAABiMK58Z8Y6w9/DuCoEvc+M+MdYe/h3BUD1c28m0+r9tLOvoZ2R+L1p1V33DHFx78wVnVWvcIcrI/F606q77hji49+YKzqrXuEMMv5+nu900amgAAOFcAAAAAAEfknj/jfUZ/vRhsDHyTx/xvqM/wB6MNgfbZH+ksu6f+1TKvWAJLaFtLrtnTFYUmHPtrK1k9qV9VVMk7KlOEhS1EklKSkiShKlKUpRERFxPmElV9klQWmMO2yKW9RL+V3aKPRHFQqxky20Et1tLSXDJO4RL1NakkRIMzPQ0mfRfEKO2gHQu0jsmHKfZlkFxQY7as5LTz4UKbT20VCHoRSHUElxxPKklSFpMyQpC1FvmnXgStO6MbuHr+ljT5FTOo3niUaoFlyfbDWijLw+TWtHHTUtFHwMuY9SCJieIaQDpfsldqGRYDExSrxqvtVzchtm4B2VbGjSFsJ3FrUhtD7iUm8okHu75GgiJZmepJI9PJ9tkfZWhmLkVLlc+BBYYKwysq1rtNs1EkjcdNCk+U9Vcm2aU8ebQL4HaoDpLaRtUtKfL86qa+XZVr9NhEu4iNvQIyojzqeKJLb2+pwzQrwDbW2ST0M+PDWqwvagzNVgtDYlJkX97jpXJyktoJk+TSwTu9oZaKNT5GRJTpz83AgxRfcOwwHUtn2SuOVqY5FVXkx6TkMzGGWIkVtxxyZG397QuU+Yo0GSVH5yNRJLUy51rt0jVsmLAaxDKLS9VETOmU1dEZekVzSlKSg5Bk9yaTUaF7qUrUo909CDFA7MAdSO9kvjcp6mZoam/wAsfs647YmaaCS3I0YnTZNbqXFoMjJxK0GhJKVqk+A7bExMTqGXlXixcdTe9wxt454vVfVWvcIYmVeLFx1N73DG3jni9V9Va9wh5GduYo75+0NaNTRAAHy64AAAAAAMOT4+0vUJvvxhTiYk+PtL1Cb78YU49fN3Ir+r2hnXrBK7P/EXH+oMe4QqhK7P/EXH+oMe4Q58489Z91X3pTRqlvgADgXBGOf6TLT+yIX76ULMRjn+ky0/siF++lD2s089V9PvCJ1S2wAQWf7Yq7A7qJTJp7rJLp+K5POuooqX3moyFElTy95aCJO8ZJItTUo+BEY+lmbmC9AdVR+yJo7HF8Xtqymu7qZkkZc2BS1jDT0w46DIlPL/AChNtoI1IIzUsuKyT87Ui+R9krjrjdI3Ep8gn2lrKl17dQxBSUtiVGSlTrDyVrSSFElRK3jPc047xFoZxigdtAOsmOyCxx/Dzu+0rdMv5UXSFQdqEqxVPRqaoxNpUaTUSSNeu9u7pb29oOBYdkzjVJi2QXFvU31PIoXobVjTzIaSmsplOk2y4SUrNC0GZnxQtXzFERGZaGxQO3AHXVhtglV9JGsD2eZnIekPuNIgR4LC5BISlJ8qvR/dQk97QiUolmZGW7wEhnfZHuRcX2c5BiFHYX1fk92mvebSw2l9pKSdJxjdcdb3X99tSS11SXJuamXg6sUDvQB1zkG2yJRz4tYxi+SXl2qE1YTKqpiNPP1zTmu72wZuk2SjNKyJKVqM9xWhGXEVmG5fV57jFff0z6pFbOb5Rpa0GhRaGaVJUk+KVJURpMj4kZGQm8bQCC2gbYa/AL6royprrI7qxYeltV9HGQ86hho0kt1W+tBaEa0loRmozPgRjQp9pdbc5LklG3FnMTaGHEmy+XbSlJokIcUhKfC13iJpRKIyLQzLQz46L41CtAdPxeyZpbVqhKoxnJrubdUaMgjQYERlTpRVLNB75qeJCVEempb3HeIkmo+A+uH9ktjmZy8aKPT38CsyRamau3sISWosh9KFLUzrvmtKyJDhEakElRoPdUoRigdtgOk1dldjxRIU4sXyxyqm2LlRHsW65tbTk1Li2yYSROmszUtBpSokmjUyI1EZGRU1Ttug3FNkklrG8jR
b4++1GnY6cNC7BKnCSps0pQ4pCkqSslEol6ERGZmWhhigdjAOpH+yXxuvwrKcis6m9p14w7HataedEQicxyykE0vdJw0KSolkZGlZ8Eq8paDaotskS2ymooZ2OX2OS7duS7XruGGW0SSYS0pRESHVKSo0u7xJWlJ6IXqRGWhr4HYICcwfO4G0CHaS61mSiLBspNZy0hKSTIWwvccW0ZKPVvfJSSM9DM0Hw00M6MSMvAP8vk/9rr/cMisEngH+Xyf+11/uGRWD5LOX6uv/AF9odAAAPNGFc+M+MdYe/h3BUCXufGfGOsPfw7gqB6ubeTafV+2lnX0M7I/F606q77hji49+YKzqrXuEOVkfi9adVd9wxxce/MFZ1Vr3CGGX8/T3e6aNTQAAHCuAAAAAACPyTx/xvqM/3ow2Bj5J4/431Gf70YbA+2yP9JZd0/8AaplXrdG9lJHsIsbCLyiWiPkdZcmcKSc6IwtKVx3UuoSiUttt7fT4Jp30qIvCLXdMj6jp8BjbUcYrrelxs8vucXyqdJySgyR6IpFtIlMJ5ZTbrSlx95BLZNBEZJTuaGZGWp+usixaly+v7RvqiBdwd7e7WsYyJDevn3VkZaj7UlDWY1XNV9RXRKqA1rycWEwllpH7EpIiL/oNppvlR0Lc7HpF7sOy+txzZpU7O76fIivMVjD0femFGeafb5ZxktxKjNLiCLeURa668T07Hh7asfq4jTeb2FRs/vVlvqpbm7h9sIRqZJX4LhkZK0PTQ/IOwR8nYjDyt5xltxXNqpBGYtddqHUudORtr0rZ1ZYVY1uTwKPLWZtjJrJ7DyI7SYkhKjMyXxMjeb8EtVeER6acR1btx2I5dn1ptIjuYc1lsq4ZSnHLybaNNx6lkmEkplLKj3kO8olw95KdFmst5SSI9PVzTDbCTS22lsjPXRCSIfsRNN+sdF5TszyPMs1tZvyf8nQ7TZ0/Q8rIfbVyE11wz5JRIUZnukepqSRp4cDMZlTjGeUMnZZlKsMXLsKOhlY9aUkeyjcs3vGwTb7bi1pbWg+19TLeJRE4XAzIyHocAwjzNhGyjN2LzE7K3oW4LkfPbq/mttTWnkMRpLMgmlpVvEayNTiS0It7jqaU8dOVtK2MyE7ZbnLnNmlXtTqb2DFZONLdjNyK2QwSk6p7Y0SbTiVJ13T3iNPMfl9HgGGLrh5s2qbMbt7G8fhYTsyao7+HW61tzQXLEJNFLcWa3GVF4BvMbxmpRElRL1V4BGeo9HRUuojMpfWlx8kETi0loSlacTIv2j6gJiLhl5V4sXHU3vcMbeOeL1X1Vr3CGJlXixcdTe9wxt454vVfVWvcIeTnbmKO+ftDWjU0QAB8uuAAAAAADDk+PtL1Cb78YU4mJPj7S9Qm+/GFOPXzdyK/q9oZ16wSuz/xFx/qDHuEKoSuz/xFx/qDHuEOfOPPWfdV96U0apb4AA4FwRjn+ky0/siF++lCzEY5/pMtP7IhfvpQ9rNPPVfT7widUtsebeyKVe4/tWx+9xV9cK1epZMCY7Gsa5t52NyqVpImZjrZeAo1KJ1Jq0M9DToY9JDCybA8ZzUmCyHHam+Jg9Wis4LUnkz/AKu+k9P7h9JMXwweW6nZfT5fQ7NM2xDAWc9xGDRv0DuNXzkYpTe5IPSS2tw+RWvlG3SUZKIlJXqngfDsii2WTIOX7KraqwOvwmvrpNtJtq2ueYNEVT0fkmlK3N0nFrJCCPcJWnNqZFqO9IkRiBFajRWW40dpJIbZZQSUISXAiIi4EReYh9REUwPK2Wdj9kd18t2ruNQL5cTPpOQR8fs3mjYt4DsNphRamakoXqSjTymmho4kWpGNe52RSLjZRcxce2T1uBW8u4qnO0Ij8TlZMaPMYeU46toybLdInjJO+o9C4cVaD0kAYYHS+3bCbzKcrxSWWNLznEojUpE7HET2oyFyVcnyD7iXVJQ8hJE4W6ZnoayMkmISi2QZvjexrEq9nGY53WJ5o5eIp409om5
cVT0hW6w6oySWiJOhE4SD/JnqRalr6jAMMX3jzFluyazsNp0/N7PZHXZ7FyKtiNvVFhKhnLppLBKToS3T5NTa0qTvGhRnqnmPy9rUudYBsuoqzHrK1xPAJbEZDrmPfKkZhMRTnhrSlJmjVO8pR726W9z+Udjj4uQ2HlbzjDa1edSCMww3ah0PtpNra/SQpeB0bGdS4iZKIOUY3kceM9TTTSjd0cJZapMjI1pIz4EnVCtS0/MPF9o2E5deWhULWYzMmxyshS50eczFbj2EZt5DinErMlcks3t4jbSoy0Mt0d+tMtsJ3W0JbTrroktCH7DD0jz3sG2VZTheR4NKuavtNitwBqklL7YaXycwpKVm1olRmfgkZ7xap+0cXFtkmWVuyrYfTyKrk7HG8lRYWrPbLR9rsE3NI17xL0Xxeb4IMz8Lm4Hp6OAMMDzjU7JMsjbJsEpnardsqzPCupbHbLR8nD+VH3+V3t/Q/wAmtKt0jNXHTTXUg2i7LM3tMh2oTamE65AuZ1E6mNHsERXbWHHRuzIyHCURtGovB1Vu6lqWuh6j0cAYYHkGRsKylVFtUi02z2PjELJW6R2sq40+Mrk1RZOr6XdFElLhp/KeCakmXDeNXAdn9lvKXTbO6+/qpcaNmNJaRplCy8siXLkGsmVx0p1I177by0mkvOXmHd44M6hrLOfBnTK6JLmwTUqJJfYStyOaiIlG2oy1SZkRa6aa6EGHiugYeyzCG9nGzrHsaQvll10NDTz2upvPGW864f2qcNaj/wB4VQALahl4B/l8n/tdf7hkVgk8A/y+T/2uv9wyKwfJZy/V1/6+0OgAAHmjCufGfGOsPfw7gqBL3PjPjHWHv4dwVA9XNvJtPq/bSzr6Gdkfi9adVd9wxxce/MFZ1Vr3CHKyPxetOqu+4Y4uPfmCs6q17hDDL+fp7vdNGpoAADhXAAAAAABI5SkmMyxuU4e40bUuKSj4Ebi+SUlOvnMml6F9g1hoWFdGtobkSYyiRHcIt5tZakeh6kf2GRkRkfORkRkJ5WzeoUfCRcpLTQkpu5pEX93Kj6PJM4WNnY02VrfGHZET0zO2Nqs03tEBm97Wo+lXft2b8UO9rUfSrv27N+KOzSWSbavLH5K4O1pAM3va1H0q79uzfih3taj6Vd+3ZvxQ0lkm2ryx+Rg7WkAze9rUfSrv27N+KJDadiTNBU071bY3bDz95WxHVfLMxe8y7LbQ6nQ3D01QpRa+TXXUucTGcskmbr6vLH5GDtdggM3va1H0q79uzfih3taj6Vd+3ZvxRGksk21eWPyMHa0gGb3taj6Vd+3ZvxQ72tR9Ku/bs34oaSyTbV5Y/IwdrSAZve1qPpV37dm/FDva1H0q79uzfihpLJNtXlj8jB2vlmD6I+K26nD0I4riCLyqUpJklJeczMyIi8pmRCjpoy4dRBjuFo40w22ovtJJEYy63BKismNSkplyn2j3m1T578omz4lqknVqJJ8T4kWvEUA8nL8ss8pimiyibovnj4veV4i4AAHjJAAAAAABhyfH2l6hN9+MKcTEnx9peoTffjCnHr5u5Ff1e0M69YJXZ/4i4/1Bj3CFUJXZ/wCIuP8AUGPcIc+cees+6r70po1S3wABwLgjpaSjbR5C3D3e26phDOvDfNp141kXnMuWRr+0hYjg3FJCvopR5zBPNpVvoMlGhbatDLeQtJkpJ6GZakZHoZl5R35FlEZNa4qo4pi6SeOLnFAZytm9StRqOTdEZnrwvJpF+9H872tR9Ku/bs34o+g0lkm2ryx+TPB2tIBm97Wo+lXft2b8UO9rUfSrv27N+KGksk21eWPyMHa0gGb3taj6Vd+3ZvxQ72tR9Ku/bs34oaSyTbV5Y/IwdrSAZve1qPpV37dm/FDva1H0q79uzfihpLJNtXlj8jB2tIBm97Wo+lXft2b8UR+1vE2cawKwsauxu485p2MlDnyzMXoSpDaFcDcMuKVKLm8omM5ZJM3X1eEfkYO12EA
ze9rUfSrv27N+KHe1qPpV37dm/FEaSyTbV5Y/IwdrSAZve1qPpV37dm/FDva1H0q79uzfihpLJNtXlj8jB2tIBm97Wo+lXft2b8UO9rUfSrv27N+KGksk21eWPyMHa0gGb3taj6Vd+3ZvxQ72tR9Ku/bs34oaSyTbV5Y/IwdrSAZve1qPpV37dm/FH9Ts2pi1Jxy1kIPnbfuJbiFfYaVOmRl9hiNJZJtq8I/Iwdr87Pkk4zeS0HvMSrR1xpZcSWkkIbMyPylvIVxFWPwyy3GZbaabS002kkIQgtEpIuBERFzEP2PmsptuEW1Vrdde0AABzDCufGfGOsPfw7gqBL3PjPjHWHv4dwVA9XNvJtPq/bSzr6Gdkfi9adVd9wxxce/MFZ1Vr3CHKyPxetOqu+4Y4uPfmCs6q17hDDL+fp7vdNGpoAADhXAAAAAAAAAAAAAAAAAHX+2gzKhx/Qkn/jNTfO0+ns+f/wDfm4jsAdfbajJNBj2uundPTF4KtP6eyLUcqES7BAAFUgAAAAAAAAAAAAAAAAAAAMOT4+0vUJvvxhTiYk+PtL1Cb78YU49fN3Ir+r2hnXrBK7P/ABFx/qDHuEKoSuz/AMRcf6gx7hDnzjz1n3VfelNGqW+AAOBcAAAAAAAAAAAAAAAAAdf7ej02W2vMf5eHz6af50z5x2AOvtvZkWyu1M9dOXh8x6f0tkWo5UDsEAAVAAAAAAAAAAAAAAAAAAAAGFc+M+MdYe/h3BUCXufGfGOsPfw7gqB6ubeTafV+2lnX0M7I/F606q77hji49+YKzqrXuEOVkfi9adVd9wxxce/MFZ1Vr3CGGX8/T3e6aNTQAAHCuAAAAAAAAAAAAAAAAAOvttSzRQY+ZKUj/GemLVP2z2S0/YfMOwRAbZzMqHH9DMv8ZqfmWSf6cz5f/Hl5vKLUcqESvwABVIAAAAAAAAAAAAAAAAAAADDk+PtL1Cb78YU4mJPj7S9Qm+/GFOPXzdyK/q9oZ16wSuz/AMRcf6gx7hCqErs/8Rcf6gx7hDnzjz1n3VfelNGqW+AAOBcAAAAAAAAAAAAAAAAAdfbe1GnZZamSjT+Xh8U8/wDnbI7BHX+3gzLZba6GZHy8PmVu/wBKZ8otRyoHYAAAqAAAAAAAAAAAAAAAAAAAAMK58Z8Y6w9/DuCoEvc+M+MdYe/h3BUD1c28m0+r9tLOvoZ2R+L1p1V33DHFx78wVnVWvcIcrI/F606q77hji49+YKzqrXuEMMv5+nu900amgAAOFcABOXl/OOyVVUyI5zW20vPyJeqmmEqNRILdSZGtR7p8NSIiLUz5iPewsK8orwWesUYCM3sz+t6L2Q9/NBvZn9b0Xsh7+aHp6Jtvmp9dyL42rMBGb2Z/W9F7Ie/mg3sz+t6L2Q9/NBom2+an13F8bVmAjN7M/rei9kPfzQb2Z/W9F7Ie/mg0TbfNT67i+NqzARm9mf1vReyHv5oN7M/rei9kPfzQaJtvmp9dxfG1Zjx72bPZR2mxW/oKGRhB2NY7LhXMS3RaG0TxxpCHXGDRyCt09UEWu8fBZHp5B6P3sz+t6L2Q9/NDrzbPsKkbeKqpgZTYVDjNZOROYXGq3UL1T85szOQfgLLgoi0M9C0MtBtZZrrpria6qZjvnciZjol2Psizaw2kbNqDKLSiPGpdrH7a+TFSe2DZbUo+TM17iNd5G4v5pab2nHTUV4im05gy2ltu1oENoIkpSmneIiIuYiLtofrezP63ovZD380MpzVazPFVT67k3xtWYCM3sz+t6L2Q9/NBvZn9b0Xsh7+aEaJtvmp9dxfG1ZgIzezP63ovZD380G9mf1vReyHv5oNE23zU+u4vjaswEZvZn9b0Xsh7+aDezP63ovZD380Gibb5qfXcXxtWYCM3sz+t6L2Q9/ND9Jk5hHI3FS6Sfu8eQTCejb/2cpyzm7rw47p6eYw0TbdFVPjO4vjasQHAo7li/q2J0dK0Ic3iU25pvtrSo0rQrQzLeSo
lJPQzLUj0Mxzx49VNVFU01RdMJAABUYcnx9peoTffjCnExJ8faXqE334wpx6+buRX9XtDOvWCV2f+IuP9QY9whVCV2f8AiLj/AFBj3CHPnHnrPuq+9KaNUt8AAcC4AAAAAzcgu26Cv7YU0p91biWWWEGRG64o9Ep1PgXHnM+YiMxeiiq0qiiiL5kaQCNN7MXfCKwo42v+yOvee3fs3+XRr+3dIfzezP63ovZD380PX0TbfNT4zuRfG1ZgIzezP63ovZD380G9mf1vReyHv5oNE23zU+u4vjaswEZvZn9b0Xsh7+aDezP63ovZD380Gibb5qfXcXxtWY8pdnZ2RFlsYo62nPDzt6m8JC0W5WPI8i8y+hxTRt8krXVKUmSt4vnHw8Hj31vZn9b0Xsh7+aEJtp2Mz9vOFLxjJrSq7ROQ3JbeiVbqHmnEHwNKlSFEWpGpJ6kfBR/tGtlmu0pria6qbv8Ae5EzF3FKq2BbUbPbNsxrcvssb7lSslLXFgnN7aUpgj0S4auTb03jJRkWnNoevHh2IIOrrsopayJXwbHH4sKIyiOww3TvElttKSSlJf4VzEREQ5O9mf1vReyHv5oUqzVazMzFVN3fO5N8bVmAjN7M/rei9kPfzQb2Z/W9F7Ie/mhXRNt81PruL42rMBGb2Z/W9F7Ie/mg3sz+t6L2Q9/NBom2+an13F8bVmAjN7M/rei9kPfzQ/jl5kdA05OtXK2yrmUmt8oMZyO62gi1NZEp1ZL0LiaeB6a6anoRxOabe7/jMTOzj94L42rQB+ULS6hK0KJSFFqSi5jIfoeMkAAAAAAGFc+M+MdYe/h3BUCXufGfGOsPfw7gqB6ubeTafV+2lnX0M7I/F606q77hji49+YKzqrXuEOVkfi9adVd9wxxce/MFZ1Vr3CGGX8/T3e6aNTQAAHCuCLgGZ5xk5a8CKL+7MWgi4Hjxk/7Iv7sx7maOdr+n91KJ1S3AAB9IwAAAAAAAABwZl5X19jXwJM1hidYKWiJGccInHzQg1r3E86t1JGZ6cxAOcAAAAAAAAAAAAAAAAAAADO2bqNVDMMzMz+VbAuPW3RUiV2bfmCZ/a1j/ABjoqh8dnD9Xa/VP3dAAAOAYcnx9peoTffjCnExJ8faXqE334wpx6+buRX9XtDOvWCV2f+IuP9QY9whVCV2f+IuP9QY9whz5x56z7qvvSmjVLfAAHAuAAAAlM+MyXjeh6a2zev8A2nRViTz/AOfjX9rt/unR6Obv1VH86BqAAD69zgAAAADHTl1SvL3MXKXretwU2SovJr4R1OKbJe/pu/PSotNdeGumggbAD8PPNxmXHXXEtNNpNa1rPRKSLiZmZ8xDj1NrDvauJZV0pqbAltJfjyWFEpt1tRapUky4GRkZGRiRywAAABj5Fl1TiaqpNrL7VO0nN1sMuTWvlZCyUaEeCR6akhXE9C4cTGwIAAGPVZdU3d7eU0KXy1lSONNT2OTWnkVOtk62W8ZEStUKI/BM9NdD0PgA2Bm5KemOWvVHfcMaQzcm8XLXqjvuGNKOXCY1tfFz1xqp6oz7hDTGZi/izUdTZ9whpj4G25yrvluAADIAAAGFc+M+MdYe/h3BUCXufGfGOsPfw7gqB6ubeTafV+2lnX0M7I/F606q77hji49+YKzqrXuEOVkfi9adVd9wxxce/MFZ1Vr3CGGX8/T3e6aNTQAAHCuCLgePGT/si/uzFoIuB48ZP+yL+7Me5mjna/p/dSidUtweY9v2SzI20y0q7fJMmxuvTjXbWNN42t5BzrHfcJwlckkzdWn8gRNK8EyWZ6cdR6cHRu2/Yve57mEW5pa+jfNMBMNciZdWlZJQaVrUnjDWSXUFvmZJURGRmrwtD0L6Gq+7iYOtY7u0nK8locDWc1iTQ4dVzZkNGVP1Eh+W6lSXnVPoZeceJCkEjdNRJJRmZ7+paUMKlzmbnOzPD81yqyjyHqS5dsToLVxrtxLciP2sa3UIbUa0trTq4hKFGe9
xIlKI+xo3Y+0d9iOKws5U7lWRUsJMU79Ml+LKd4eGRutOJcNJ+ZSj15z1MzFfW7NsbqLChmwqxEaRRQXK2uUh1ZJjx17m+2Sd7Q9eSRxURmW7wPietYpkeXlZ9nNrXYfgMK1lznZWTZBUO2T1wqvly2IDquRaVMS04pK1JPU1JTvKJnTeLUzGzlFRtOxHEK2qt8kl1DVjm1PErZEO7csJsaM6skPtOSFstG6ne8JJLSrgrRW8REO9LPYfg9zj0qkm0Lb9dJs3blSFPuk4iY4s3FvtuEvfbUalKPwFJ01Mi0LgPpX7F8OrKOBUR6hRQYNo1dMpclvuL7cbUSkPKcUs1rMjSXzjMj0IjIyDDI6s2m0Lyb+i2fYvPzSxu2YEi1cWjLnoKG2FuklLsiUpLrjiiXqlDZEaSLe1LQiEBTNSdsTnY2W2SW9sm0sotoxKlVlm/CWtTUZfhpNlSd1atzwlJ0NRakfDgPTGbbIsT2iWMOffVRy5kRpbDb7Ul6Oo2lGRqaWbS08o2ZkRmheqfsGfL2CYHNxSuxtdCTdNWylzYLEeU+yqI6tSlKNlxCyW2RmtXgpUSdD0004BNM3jpvKHs+2n7YM9pKZ+UzCxc4kSIxFyt6mW0bsdLvbDiG4zvL7ylGRb57pE3pu66mfoTZ+xkMXCaRnLH40rJG4jaLB+Gf5J14i0UtPBPOfHmLn5hO5TsAwLM5kSXbURvy40VMFMhmZIYccjp5mnVNuJN1JeZw1c5+ccq1pc+Ymqaxy7xetpGkIbixJ1HIkOtJSgi0U4iY2k+JHpogtC0LjpqdoiY4x1p2QB3mM5tEy22sska2awq9CJXcvZHGdrpJPGapL7JaHIaNBoSZeFuklR7h66jjzcutk4b2TEhN1NS5VHLVWulKWSoafkhlxBsnr+TLfM1Fu6eEZmXEX1hsLpc9ej2e0KFAyG+aQTC3q8pUOI6ylZrbbcjnIWlwiUpR6LNRamfAhzMu2B4HnVnZT7qi7akWTBR5xNzH2W5SEp3UG6224lC1JLglaiNSdC3TLQtIukdHkm5zWdnxScvyaC3SYVUWcNuttnY5JlORpKlPK3T1UZm0nUjPdVqZqIz0Mufhlje0FtsfslZTeWjmbY3LlW7NlOU8yb6YTUhDjLZ+CyZKUotGySWh8S14jviHsvxivXbrj1nJqtq5iqmn2w6fKxWULQ03xV4O6l1ZbydFHrxM9CH9a2YYyyWMEit07moi4VT+Xd/wAGZW0TKk/O8PVtKU6r1PhrrrxDDI8xYbXXlpjuwKdIzzMFyMxQca5P5ad0fbKE4+kklzNqI2klyiN1ZkajNRqPeG21lU9rFb/Cn7bKbqzbzuRj9H2jcHFnPtJitydx+aojWTaErcM18VmSUlxHf9fsqxaqhYnEi1fJR8UMzpkdsOn2rq0pnnNWq/AWovD3ufXn4jhW2xHCryFYxZlMa27C1+XH1ty323Sncmlvl23ErJbStxCU+AaS014cT1jDI84llGcN7JchoZeRWdZc1O0Ovo49g1ZnLksR3XIiuTVINCDfIuWWWq0+EXgqIyIdkZzZp7G7K8dyOXfXc3CpcSZW2abe0fmEzJJCpMd4uUUeilcm81w0Lw2y8hC/r9geBVUOREh0CY8aROi2brTcp8krlR1Epp4/D4r3kpNSudZl4e8OJto2aWW1yPUY46dWjEFTGZluclK1y3CZdQ6hplJaJIlmk0qWo9SSZ6EevBhmIHO2FwbyJsto38llyZd/YtqspvbTqnFMuPqN3kU7x+Clslk2SS4ESOAvQAaRxDN2bfmCZ/a1j/GOiqErs2/MEz+1rH+MdFUPj84fq7X6p+7oAABwDDk+PtL1Cb78YU4mJPj7S9Qm+/GFOPXzdyK/q9oZ16wSuz/xFx/qDHuEKoSuz/xFx/qDHuEOfOPPWfdV96U0apb4AA4FwAAAEnn/AM/Gv7Xb/dOisEnn/wA/Gv7Xb/dOj0c3fq6P50DUHXm3aiyzIcBXFw6
W9Gs0y47zzMWZ2m/LjJWRvMNSNPyK1pIyJfk85a6jsMYOaYPS7QqU6m+iLmQeVQ8SW5DjC0rSeqVJW2pKkmR+UjIfXTxw53mKy2g22VJ2e4Phs3ICbnyLduzbv75yvtCkxFINUJU1DbyyNHKGfgampKE+H87XUu6raTjFNh9JkGRza1i0zqNEiu112uZMRAchyOUjuyVMtm54aTNJqSai1Tx3kJUXc0nYFgEvD4mMOY4z8kRJKprCUPupfbkGZmp4nyWTpOGZnqvf3j14mOdX7HcQq6mlrY1SaIlPZFbw0qlPLUiXurTyylqWanD0cXrvmoj14lwIZ4ZHQOSZnkeAO7QMLrMlsyiJySiqoV1aSVS5NWxYIb5dfKu6mrdPe3DWZ7prLjwFfs+wpvBeyls4DVzdXaF4XHdJ68nrmPJPt50jInF8d3wddOYjUemhcB2rbbJ8SviygrGlZnIyZLKLZD61rRJJpO614Jq0QaSItDRoepEfOWowKrYdT4AuVZ4DGi0+SPxkQjsLlyZZIOOle/uKQqQlR+XQ94jLhzkWgnDN4qdpNei22fZJEW4+0h2ufSa4rymXC/Jn81aTJRH+wx5wxxqbhvYzbH4NFfW8A8ylUldOsV2Dj7kFp5jVwoxuKUTGpoJtJIIiSa+BakQ79o6zaGi0YO8vsXnVXEn48GikMPLLdPQkrXMcSXHTXVB8NS8uo4Nb2P2AVONW+PRsfSVHaGhUmvdlPuskaFGpHJJUsyZ3VKMy5Pd0PQy5iEzEyJrZk7PxDbbleBovLTIaJimhXDK7iWqY/CecdebWybyzNZpUTaFkSjPTjpwMW+2Gnya/2aX9fh1iVVkj7BJhyjc5M0nvJNSSXoe4pSSUklaeCaiPyDNr9lLez2pkM7OEVlFYTZKXp0y7Zk2a5KUoNJEpZyEOGZcNDUsyItSIuOpHMHyjLYcunzuyx28xuW3uvw6yqlQXlKJRKQZOnMXu6Gkj4ER8C0Mg47rh0e/ew51RgdYlzJGbap2k18eyrsqnduyoTyoziiQl/VXKNKIyWlW8eu+fNzFxtr+ZZCzd5Nm2HzMjbr8bvolbLkzMgNqvU6l5hp+O1Xk2onUflNDWtSVbyjNJmSSId8p7H7AE4fKxg8eQ5USpaZ7yXZL631yE6brxyDWbu+W6RErf1Ii0I9B8cg7HPZ1lNlZzrTHESnrJXKSkHLfSy47uknluSS4SCd0IvypJJfl3tRXDNwi6eqsNs203aK3aZdkNDFxmyZq6+robFULk0drtunJd3f8AKm4pw90l6pIkaaGOs8xym4qNvm0OnQuypMQn5BRM3+UwXuSciNKgNJZaSslEpHKukhC3S+YlXnURl6Jy7YHgmdXHyrdUXbNibCYzshqY+wqQ0nmQ9ybieWIvM5vDVl7K8VntZS1JqG32sobQ1bocdcUmUlDJMoLQ1eBo2ki1Runw15+ImaZFUlJJSSS10ItOJ6mM7JvFy16o77hjmQYbVdCjxWd/kWG0tI5RxTit1JaFqpRmaj0LnMzM/KY4eTeLlr1R33DG9ny4TGtr4v4s1HU2fcIaYzMX8WajqbPuENMfA23OVd8twAAZAAAAwrnxnxjrD38O4KgS9z4z4x1h7+HcFQPVzbybT6v20s6+hnZH4vWnVXfcMcXHvzBWdVa9whysj8XrTqrvuGOLj35grOqte4Qwy/n6e73TRqaAAA4VwRcAj7uMnP7Iv7sxaCau6KezaLtqZMd6S82lqTElOG2h0k67iiWSVGlRbxlzGRkfk01HrZttqLG1qxzdfF3rE+yJ44uc0BicvlnR6B7VP4IcvlnR6B7VP4I+l+LZfPT5o3s8EtsBicvlnR6B7VP4IcvlnR6B7VP4IfFsvnp80bzBLbAYnL5Z0ege1T+CHL5Z0ege1T+CHxbL56fNG8wS2wGJy+WdHoHtU/ghy+WdHoHtU/gh8Wy+enzRvMEtsBicvlnR6B7VP4Ix8my/IMTiQ5M3HYptyp0avb5
KzMz5R91LSNfyRcN5Zan5hPxbKf76fNG8wyswGJy+WdHoHtU/ghy+WdHoHtU/giPi2Xz0+aN5gltgMTl8s6PQPap/BDl8s6PQPap/BD4tl89PmjeYJbYDE5fLOj0D2qfwQ5fLOj0D2qfwQ+LZfPT5o3mCW2AxOXyzo9A9qn8EOXyzo9A9qn8EPi2Xz0+aN5gltgMTl8s6PQPap/BH6QeWSNWyqK2EauHLuT1upR9u4TRGrycN5OvnIJtrGOObSnxjeYJffZuRpoZhGRkfyrYnx626KoZ9DTNUFUzBZWp0kGpa3V6bzji1Gtaz0Ii1UpSlHoWnEaA+Oyq0ptsotLSnVMzPq1AAByjDk+PtL1Cb78YU4mJPj7S9Qm+/GFOPXzdyK/q9oZ16wSuz/wARcf6gx7hCqErs/wDEXH+oMe4Q58489Z91X3pTRqlvgADgXAAAASmfEZrxvQua2b/dOirGZkVGm+ryYJ048hpxL8d8k73JupPVJmnyl5DLhqRnxI+JdmR2tNjlFFdeqJHyAYpuZa0e6dHWvGX+0bs1JSr7dDZ1L9nHTznzj+cvlnR6B7VP4I+v+LZdZT5o3ssEtsBicvlnR6B7VP4IcvlnR6B7VP4IfFsvnp80bzBLbAYnL5Z0ege1T+CHL5Z0ege1T+CHxbL56fNG8wS2wGJy+WdHoHtU/gjIyzL8gw2hft5+OxVRWVNoUTNmalarcS2nQjaLyrITFrZTxY6fNG8wysgGJy+WdHoHtU/ghy+WdHoHtU/giPi2Xz0+aN5gltgMTl8s6PQPap/BDl8s6PQPap/BD4tl89PmjeYJbYDE5fLOj0D2qfwQ5fLOj0D2qfwQ+LZfPT5o3mCW2M3JS1xy16o77hjjcvlnR6B7VP4I/DtVkWRMuQJ8OFUwH0m3IdZmHIeUgy0UlCTbSRGZcN4zPTXmMTGUWNE4qq4ujtifSCKZUeL+LNR1Nn3CGmPw00hhpDbaSS2hJJSkvIRcxD9j4aurFVNW1qAACgAAAMK58Z8Y6w9/DuCoEvc+M+MdYe/h3BUD1c28m0+r9tLOvoZ2R+L1p1V33DHFx78wVnVWvcIcrI/F606q77hji49+YKzqrXuEMMv5+nu900amgAAOFcAAAAAAAAAAAAAAAAAQG2gyKhx/UjV/jNTcxGf9PZ+0v/8AefmF+IDbQk1UOPkRGf8AjNTH4KN7+nM+T/z5OcWo5UIlfgACqQAAAAAAAAAAAAAAAAAAAGHJ8faXqE334wpxMSfH2l6hN9+MKcevm7kV/V7Qzr1gldn/AIi4/wBQY9whVCV2f+IuP9QY9whz5x56z7qvvSmjVLfAAHAuAAAAAAAAAAAAAAAAAOv9vXDZZa6lr+Xh8C60yOwB1/t5Le2W2pERn+Xh8yd7+lM+QWo5UDsAAAVAAAAAAAAAAAAAAAAAAAAGFc+M+MdYe/h3BUCXufGfGOsPfw7gqB6ubeTafV+2lnX0OFdsOSqWey0nfdcjuIQkvKZpMiITNTeToVVDjuY3b8oyyhtW6lnTUkkR/wC1FmA6MoyOMoqivFMTHFxXe8Iiq5L90svo3cegz8UO6WX0buPQZ+KKgBzaNjrJ9NycfYl+6WX0buPQZ+KHdLL6N3HoM/FFQAaNjrJ9Nxj7Ev3Sy+jdx6DPxQ7pZfRu49Bn4oqADRsdZPpuMfYl+6WX0buPQZ+KHdLL6N3HoM/FFQAaNjrJ9Nxj7Ev3Sy+jdx6DPxQ7pZfRu49Bn4oqADRsdZPpuMfYl+6WX0buPQZ+KHdLL6N3HoM/FFQAaNjrJ9Nxj7Ev3Sy+jdx6DPxRA7ZcikOUVAS8etGyLJadRG6lrQzKczoRaOc5nzeTXTXgO5h13t1M2cNq5JGZFGySidWZeRHypFJR/sJKjP8AYQtGboib/iT6bjH2Nzull9G7j0Gfih3Sy+jdx6DPxRUAK6NjrJ9Nxj7Ev3Sy+jdx6DPxQ7pZfRu49Bn4oqADRsdZPpuMfYl+6WX0buPQZ+K
HdLL6N3HoM/FFQAaNjrJ9Nxj7Ev3Sy+jdx6DPxQ7pZfRu49Bn4oqADRsdZPpuMfYl+6WX0buPQZ+KHdLL6N3HoM/FFQAaNjrJ9Nxj7Ev3Sy+jdx6DPxQ7pZfRu49Bn4oqADRsdZPpuMfYl+6WX0buPQZ+KHdLL6N3HoM/FFQAaNjrJ9Nxj7EhBdm22YV8tVRNgRo0OS2tyWTZEalrZNJFurV5EKFeADvyfJ4yemaYm++b+Pw9lZm8EHi1lYUuNVdfIxy1N+LGbZWbaWTSakpIj0PlObgLwBllOSRlNVNU1TF1+q7pu29yaasKX7pZfRu49Bn4od0svo3cegz8UVADl0bHWT6bk4+xL90svo3cegz8UO6WX0buPQZ+KKgA0bHWT6bjH2Jfull9G7j0Gfih3Sy+jdx6DPxRUAGjY6yfTcY+xL90svo3cegz8UO6WX0buPQZ+KKgA0bHWT6bjH2Jfull9G7j0Gfih3Sy+jdx6DPxRUAGjY6yfTcY+xL90svo3cegz8UO6WX0buPQZ+KKgA0bHWT6bjH2Jfull9G7j0GfiiB265DJd2YWiF4/aNJN+H4TqWt0v8Ka8zmo7mHXe3wzVs3cYTrvyrWpiJIvKbtjGbL3hanN0UzE/En03GPsbndLL6N3HoM/FDull9G7j0GfiioAV0bHWT6bjH2Jfull9G7j0Gfih3Sy+jdx6DPxRUAGjY6yfTcY+xL90svo3cegz8UO6WX0buPQZ+KKgA0bHWT6bjH2Jfull9G7j0Gfih3Sy+jdx6DPxRUAGjY6yfTcY+xL90svo3cegz8UO6WX0buPQZ+KKgA0bHWT6bjH2Jfull9G7j0Gfih3Sy+jdx6DPxRUAGjY6yfTcY+xL90svo3cegz8UO6WX0buPQZ+KKgA0bHWT6bjH2I0pE25ySjc+RZ8JiK66469KJskkRsrSXzVmeupl5BZAA7smyaMmpmmJmb5v4+6I6O5WqcQAAOtUAAAAAAAAAAAAAAAAAAAAExtOxV3N9nuQUcZxLMyZDcRFeVzNSCLeZWf+64SD/uFOADDwfKG80xGpu22lRzmMJccjuFoth3mcaUXkUhZKSZedJjcEFYQpWzi2sbushvWGPWDxy7SthtLdkR3jJKVSYzSSM1kZJI3GUlvKPVaCUtSkuWNPcwMgrWLCsmMT4L5GbUiO4S0LIjMj0MvMZGR+YyMgHMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAdebQk90mc4RjLRGtDMw7+foXBLEYtGiM/IpUlxgyLyk05p80zLfyvOIuNvMV7DS7XIJZf4HURj1ddLm5RZ8eSaI/nOq8EuYtVGlKvzhmKPUi7C1tHm5mR2ykLnSWiPk0kgjJthrXiTTZGrQvKpa1mRGtQCmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABE3WzCOuzk3ONWD2JX0lfKSJMFtKo81ZFprJjq8B09NC3/AAXNCIicIiIWwAOvVZ5kmJEpOXYw9JiJVoVzjCFzWjTx8JyNpy7Z/YhLyS8qy5hVYzmNHmcNUqjtodqygyJw4ryVm0oy13VkXFCv6qiI/sGwJbJ9l+L5fOTYWNQ0VuhBobtoalRZzSdNNESWjS6kubgStOBeYBUgOu+5DOcYSR4/l7d7GSepV+VxycVp5EolMEhaf95xDxh31Z9AZoy/DrekQn51jWIO1gn9pKZLlkl/WcZQRefn0DsQBjYvmVDm0FUzH7mBdRUq3VuwJCHiQr9FW6Z7qi46kehloY2QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAR+Q7W8TxqyOrkWyJdyX/tFY2ubN82psMkpZFr/rGRF5zAWADrwsrzzJ0mVFibOOR1GW7PyqQRuafpJix1KNXk4LdaPzkQ/pbKJN6rlMvyu2yFKk6Kr4q/k6BxLQy5JkyWtJ8fBdccIBo5FtYxvHp6q0pblvdkZl8kUzKpssj/r
ttkZtlx+c5up85kM1xGfZqtSNWsApzPTeQpuZbOp+zgpiOf8A3+H6J81jj2NVGJVjddR1cOnr2z1RFgMJZaSflMkpIi1GkAwcTwenwqPIRWRlJkSlE5LmyHFPSpayLQlvPLM1uGRcC1PgXAtCIiG8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUybZZimXT0WFlSsHbNlo3axFKizmy8yJLRpdSXAuBKLmLzDHLC80xlJnjuZnbRyMjTXZZGKSRJ/QRJZ5N1Pk8J0nj5/OWnYYAOvz2m2tA4pGV4baVrRL3flGmI7WGZef8AJJJ9JF5TWylJa8/A9ONmHZDYPh+za1zk7dq6oqxTKZXyQtD7zRuPoZLeRvEaTJay1I9DLQ+GpaDskebuzF7Fyy7JOux6BTO4/SqjSlSJ1xNiKXONJI3W2mlJLi2e+tSkqMuKGzLmMB3/AI3klZmFDAu6Wa1Y1U5lL8aUweqXEHzH9n2kfEj1I9DIaQ88di32NFp2LtXasTdoTl9QPtm+uudhEwxFdLQzeSs3FGXgkZGXAj4GfzSHIzPaLYZk8tqI/IraP/ZstGbT0gv0nFEe8ST8iC04fO113U+hkeRWmWVXUcURrkd12OU0tQ6bU63gQnC50SJKGz/6GZDh98HFuktP6+1+IecGKuHGTutRGWy/qtkQ+narH6lv0SH0UZisruO0nwL4ei++Di3SWn9fa/EHfBxbpLT+vtfiHnTtVj9S36JB2qx+pb9EhOgrL559C+Hovvg4t0lp/X2vxB3wcW6S0/r7X4h507VY/Ut+iQdqsfqW/RINBWXzz6F8PRhbQMXUZEWSVBmfMRT2vxDWgWcO0Z5WFLYmNc2+w4S0/wDUjHl3tRj9S36JD8swmokpMqJvQJiPmyYijadT/wAydD0+w+B+UhSrMVF3/G0m/uL4erR13tW294ZsYl49Fye0TElXk5qDFaJSSNO+skm84alESGUa6qWZ8C1015h8dmm0p64kIprpZLsTIzjyySSSkkRamlRFwJwi1PgREZEZkRaGReWtt/8A6cmWbWctscnf2qJtbWWs1JbtK5TbbSNfBbQaHFbqS8hEn+4fMZRk9pk1pNnaRxj17e7XsUobF2sOz+VLpotVVFMyufNTx0LeZZSpSC11LeURJLQzMyIjMs88jz/JkqKmxiJirBqIkzcofJ97Tj4RRIyzIy5j0U+2rjxIj4Ft7M8UXhOAUFM/HrY8+LBZbm/JEVEaK5JJtJOrbbQlCUpUsjMiJJcDLgXMKccw6970jl5vqy7KbnJkucDgIe7QgpLXXdJqPuqWnhzPLcFdjuLU2IVyK+iqYNNBTxTGgR0MNkfn3UkRDUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB1pt2tVsY9X1bat0rKWSXv6zSEmsy/vUSCMvKRmX2H1IO0tvUFaoNBYERm3HmKZcMi+aTiDIjP7N5KS/aoh1aPvs0RTGSU4ds39//AOXFXQAJiVtTwuDKejScvoY8hlZtusu2bKVoUR6GlRGrUjIy0MjHy77uC9Ncd9qsfjHq/Fs/mjxUfPJNpkShuXaqNUW+QTo7SX5TVPGS72qhWu4azUpJaq3TMklqoyLXTmGe5tqp5Emuj1FdbZC/YVxWkdFbHSe8xvmgzM1rQSTIy0MlacTIi1PgIm/wFuwz63yuNhtTtLpcgYjuMPdsR96I423yZ7qnD3VNrIknqkzMjI+Bivx3CZNPtLrrKNTR6ilZxrtE2Yi0ckxIOQThtJSWhmWm8e9ukR/t4Dji0tqqpjVF+zov16rtXbKXLkbZqIsfobOExYWrt4aigVsKPvS3TRryhGhRkSdzQyUajIi8/MPnsdzKxzaHlEqwKQ12reyIkePLZS07HZShs0tqJPlI1K4mZ6+cy0ELjWA5dhDGH3c
ajTaT60rSHNqSltNucjIlqdbdbWatzUiJOpGZcFac5Ck2fXDGAMZG5msqtxKXc3kmxixZ9mwRrZUhoiMj3tD0MjI/N/01rRa2k10zacUd10ao6e/oHawCU77eDGk1d2mPaEehn8qsfjGnRZpj+Uuut0t7WW7jREpxECY2+aCPmMySZ6F+0d8WlFU3RVCGlMdfjM9tRV8nMimUhhf6LiD3kn+zUuJeUtSHqGpsW7iqhz2S0alMofQR/oqSSi//ALHluydNmvkLSlS1kg91KS1NStOBEXnM9CHp3GqtVJjlVXKUSlQ4jUc1F5TQgk/+B81n2KcNnPTx+y8amkAAPkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZ9/RxMlppdZOQa4slBoXunoovKSkn5DIyIyPyGRDzrkNFOw+z+T7UiJSlaRphFutS0+Q0+ZWnOg+JHrzp0UfpocaxrYlvDciTozUyM5wWy+glpV+0jHrZBnCrIpmLr6Z6PeP5xne8unDYUZmbDZmfEzNBD+doxi/o7XoEO7ZOw7FXlastTYRfoR5zpJL9iTUZF/cPh3h8c+k23ryh9JGecknb4RvLo2unkpShJJSRJSXMRFoQ/o7g7w+OfSbb15Qd4fHPpNt68oW0zknb4f+l0bXT4/DjDTxkbjaFmXNvJIx3H3h8c+k23ryg7w+OfSbb15QaZyTt8P/S6NrpntKP+oa9AgNMeGhbhk2wgi1UvQkkRfaY7mLYRjhGR9s2p/wD85Q1qbZLi1JJbkt1vbUlsyNDs55cg0GR6kaSWZkR6+UiIxSrPWS0xfTEzPdG8uhB7MMAevJ8S9sWFM1cZZPRGXUmlcl0jI0O6HzNpPinXioyJRaJIjX3YAD5PK8rryu0x1/6jYAAA4gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//Z", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ - "# Entry Graph\n", - "class EntryGraphState(TypedDict):\n", - " raw_logs: Annotated[list[Logs], add_logs]\n", - " logs: Annotated[list[Logs], add_logs] # This will be used in subgraphs\n", - " failure_report: str # This will be generated in the FA subgraph\n", - " summary_report: str # This will be generated in the QS subgraph\n", + "from langgraph.graph import START, StateGraph\n", + "from typing import TypedDict\n", "\n", "\n", - "def select_logs(state):\n", - " return {\"logs\": [log for log in state[\"raw_logs\"] if \"grade\" in log]}\n", + "# Define subgraph\n", + "class SubgraphState(TypedDict):\n", + " foo: str # note that this key is shared with the parent graph state\n", + " bar: str\n", "\n", "\n", - "entry_builder = StateGraph(EntryGraphState)\n", - 
"entry_builder.add_node(\"select_logs\", select_logs)\n", - "entry_builder.add_node(\"question_summarization\", qs_builder.compile())\n", - "entry_builder.add_node(\"failure_analysis\", fa_builder.compile())\n", + "def subgraph_node_1(state: SubgraphState):\n", + " return {\"bar\": \"bar\"}\n", "\n", - "entry_builder.add_edge(START, \"select_logs\")\n", - "entry_builder.add_edge(\"select_logs\", \"failure_analysis\")\n", - "entry_builder.add_edge(\"select_logs\", \"question_summarization\")\n", - "entry_builder.add_edge(\"failure_analysis\", END)\n", - "entry_builder.add_edge(\"question_summarization\", END)\n", "\n", - "graph = entry_builder.compile()\n", + "def subgraph_node_2(state: SubgraphState):\n", + " return {\"foo\": state[\"foo\"] + state[\"bar\"]}\n", "\n", - "from IPython.display import Image, display\n", "\n", - "# Setting xray to 1 will show the internal structure of the nested graph\n", - "display(Image(graph.get_graph(xray=1).draw_mermaid_png()))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Stream\n", + "subgraph_builder = StateGraph(SubgraphState)\n", + "subgraph_builder.add_node(subgraph_node_1)\n", + "subgraph_builder.add_node(subgraph_node_2)\n", + "subgraph_builder.add_edge(START, \"subgraph_node_1\")\n", + "subgraph_builder.add_edge(\"subgraph_node_1\", \"subgraph_node_2\")\n", + "subgraph = subgraph_builder.compile()\n", "\n", - "Now let's see how we can stream from our graph!\n", "\n", - "### Define input\n", + "# Define parent graph\n", + "class ParentState(TypedDict):\n", + " foo: str\n", "\n", - "First, let's define the input we will use for the rest of the notebook:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "# Dummy logs\n", - "dummy_logs = [\n", - " Logs(\n", - " id=\"1\",\n", - " question=\"How can I import ChatOllama?\",\n", - " grade=1,\n", - " answer=\"To import ChatOllama, use: 'from langchain_community.chat_models import 
ChatOllama.'\",\n", - " ),\n", - " Logs(\n", - " id=\"2\",\n", - " question=\"How can I use Chroma vector store?\",\n", - " answer=\"To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).\",\n", - " grade=0,\n", - " feedback=\"The retrieved documents discuss vector stores in general, but not Chroma specifically\",\n", - " ),\n", - " Logs(\n", - " id=\"3\",\n", - " question=\"How do I create react agent in langgraph?\",\n", - " answer=\"from langgraph.prebuilt import create_react_agent\",\n", - " ),\n", - "]\n", "\n", - "input = {\"raw_logs\": dummy_logs}" + "def node_1(state: ParentState):\n", + " return {\"foo\": \"hi! \" + state[\"foo\"]}\n", + "\n", + "\n", + "builder = StateGraph(ParentState)\n", + "builder.add_node(\"node_1\", node_1)\n", + "builder.add_node(\"node_2\", subgraph)\n", + "builder.add_edge(START, \"node_1\")\n", + "builder.add_edge(\"node_1\", \"node_2\")\n", + "graph = builder.compile()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Stream normally\n", - "\n", - "First let us examine the output of streaming normally:" + "Let's now stream the outputs from the graph:" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "---------- Update from node select_logs ---------\n", - "{'logs': [{'id': '1', 'question': 'How can I import ChatOllama?', 'grade': 1, 'answer': \"To import ChatOllama, use: 'from langchain_community.chat_models import ChatOllama.'\"}, {'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}]}\n", - "---------- Update from node failure_analysis ---------\n", - "{'logs': [{'id': '1', 'question': 'How can I import ChatOllama?', 'grade': 1, 
'answer': \"To import ChatOllama, use: 'from langchain_community.chat_models import ChatOllama.'\"}, {'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}], 'failure_report': 'Poor quality of retrieval for document IDs: 2'}\n", - "---------- Update from node question_summarization ---------\n", - "{'logs': [{'id': '1', 'question': 'How can I import ChatOllama?', 'grade': 1, 'answer': \"To import ChatOllama, use: 'from langchain_community.chat_models import ChatOllama.'\"}, {'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}], 'summary_report': 'Questions focused on usage of ChatOllama and Chroma vector store.'}\n" + "{'node_1': {'foo': 'hi! foo'}}\n", + "{'node_2': {'foo': 'hi! foobar'}}\n" ] } ], "source": [ - "for chunk in graph.stream(input, stream_mode=\"updates\"):\n", - " node_name = list(chunk.keys())[0]\n", - " print(f\"---------- Update from node {node_name} ---------\")\n", - " print(chunk[node_name])" + "for chunk in graph.stream({\"foo\": \"foo\"}, stream_mode=\"updates\"):\n", + " print(chunk)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "As you can see there are only 3 updates made to our overall graph state. The first one is by the `select_logs` node, and then we receive one update from each subgraph (note if you don't want to see the `log` update from each subgraph that you can set the [output schema](https://langchain-ai.github.io/langgraph/how-tos/input_output_schema/) to exclude it). What we do not see however, is the updates occurring *inside* each subgraph. 
The next section will explain how to do that.\n", - "\n", - "### Stream subgraph \n", - "\n", - "To show the updates occurring inside of each subgraph, we can simply set `subgraphs=True` to the streaming call:" + "You can see that we're only emitting the updates from the parent graph nodes (`node_1` and `node_2`). To emit the updates from the _subgraph_ nodes you can specify `subgraphs=True`:" ] }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "---------- Update from node select_logs in parent graph ---------\n", - "{'logs': [{'id': '1', 'question': 'How can I import ChatOllama?', 'grade': 1, 'answer': \"To import ChatOllama, use: 'from langchain_community.chat_models import ChatOllama.'\"}, {'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}]}\n", - "---------- Update from node get_failures in failure_analysis subgraph ---------\n", - "{'failures': [{'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}]}\n", - "---------- Update from node generate_summary in failure_analysis subgraph ---------\n", - "{'failure_report': 'Poor quality of retrieval for document IDs: 2'}\n", - "---------- Update from node failure_analysis in parent graph ---------\n", - "{'logs': [{'id': '1', 'question': 'How can I import ChatOllama?', 'grade': 1, 'answer': \"To import ChatOllama, use: 'from langchain_community.chat_models import ChatOllama.'\"}, {'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use 
Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}], 'failure_report': 'Poor quality of retrieval for document IDs: 2'}\n", - "---------- Update from node generate_summary in question_summarization subgraph ---------\n", - "{'summary': 'Questions focused on usage of ChatOllama and Chroma vector store.'}\n", - "---------- Update from node send_to_slack in question_summarization subgraph ---------\n", - "{'summary_report': 'Questions focused on usage of ChatOllama and Chroma vector store.'}\n", - "---------- Update from node question_summarization in parent graph ---------\n", - "{'logs': [{'id': '1', 'question': 'How can I import ChatOllama?', 'grade': 1, 'answer': \"To import ChatOllama, use: 'from langchain_community.chat_models import ChatOllama.'\"}, {'id': '2', 'question': 'How can I use Chroma vector store?', 'answer': 'To use Chroma, define: rag_chain = create_retrieval_chain(retriever, question_answer_chain).', 'grade': 0, 'feedback': 'The retrieved documents discuss vector stores in general, but not Chroma specifically'}], 'summary_report': 'Questions focused on usage of ChatOllama and Chroma vector store.'}\n" + "((), {'node_1': {'foo': 'hi! foo'}})\n", + "(('node_2:b692b345-cfb3-b709-628c-f0ba9608f72e',), {'subgraph_node_1': {'bar': 'bar'}})\n", + "(('node_2:b692b345-cfb3-b709-628c-f0ba9608f72e',), {'subgraph_node_2': {'foo': 'hi! foobar'}})\n", + "((), {'node_2': {'foo': 'hi! 
foobar'}})\n" ] } ], "source": [ - "# Format the namespace slightly nicer\n", - "def format_namespace(namespace):\n", - " return (\n", - " namespace[-1].split(\":\")[0] + \" subgraph\"\n", - " if len(namespace) > 0\n", - " else \"parent graph\"\n", - " )\n", - "\n", - "\n", - "for namespace, chunk in graph.stream(input, stream_mode=\"updates\", subgraphs=True):\n", - " node_name = list(chunk.keys())[0]\n", - " print(\n", - " f\"---------- Update from node {node_name} in {format_namespace(namespace)} ---------\"\n", - " )\n", - " print(chunk[node_name])" + "for chunk in graph.stream(\n", + " {\"foo\": \"foo\"},\n", + " stream_mode=\"updates\",\n", + " # highlight-next-line\n", + " subgraphs=True,\n", + "):\n", + " print(chunk)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The first thing you will notice as different is that we are no longer just receiving chunks, but we also receive namespaces which tell us what subgraph we are currently inside of.\n", - "\n", - "If you look carefully at the logs you can see we are now receiving the updates made by nodes inside of each subgraph, for instance we now see updates to the `summary_report` state channel from the `get_failure` node which lives in the `failure_analysis` subgraph. When we didn't set `subgraphs=True` all we saw was the overall update made by the subgraph `failure_analysis`." + "Voila! The streamed outputs now contain updates from both the parent graph and the subgraph. **Note** that we are receiving not just the node updates, but we also the namespaces which tell us what graph (or subgraph) we are streaming from." 
] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "langgraph", "language": "python", - "name": "python3" + "name": "langgraph" }, "language_info": { "codemirror_mode": { From 5acb64c08c3885e0a00342fc854b1e2e0b62b240 Mon Sep 17 00:00:00 2001 From: vbarda Date: Wed, 29 Jan 2025 21:29:27 -0500 Subject: [PATCH 06/14] streaming from nodes --- ...9-ba06-48ba-abe5-e72df24407af.msgpack.zlib | 1 - ...a-96e3-442f-9924-0c99f46baed8.msgpack.zlib | 1 - ...ming-from-final-node_68ac2c7f.msgpack.zlib | 1 - docs/docs/how-tos/index.md | 2 +- .../how-tos/streaming-from-final-node.ipynb | 351 ------------------ .../how-tos/streaming-specific-nodes.ipynb | 211 +++++++++++ docs/docs/how-tos/streaming-tokens.ipynb | 20 +- docs/mkdocs.yml | 3 +- 8 files changed, 220 insertions(+), 370 deletions(-) delete mode 100644 docs/cassettes/streaming-from-final-node_2ab6d079-ba06-48ba-abe5-e72df24407af.msgpack.zlib delete mode 100644 docs/cassettes/streaming-from-final-node_55d60dfa-96e3-442f-9924-0c99f46baed8.msgpack.zlib delete mode 100644 docs/cassettes/streaming-from-final-node_68ac2c7f.msgpack.zlib delete mode 100644 docs/docs/how-tos/streaming-from-final-node.ipynb create mode 100644 docs/docs/how-tos/streaming-specific-nodes.ipynb diff --git a/docs/cassettes/streaming-from-final-node_2ab6d079-ba06-48ba-abe5-e72df24407af.msgpack.zlib b/docs/cassettes/streaming-from-final-node_2ab6d079-ba06-48ba-abe5-e72df24407af.msgpack.zlib deleted file mode 100644 index 2568b28c0d..0000000000 --- a/docs/cassettes/streaming-from-final-node_2ab6d079-ba06-48ba-abe5-e72df24407af.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNq1eHVclN228CAN0t1IGMCQQ4NIDh1DlzgMAww1wAxdCioojRKioLS0dCMhKSHd3SANAgLyjfq+x3Puuef+7v3j27NjPWuv2mutZ+9ndnCOJ9wdhUC6YBUgXNBwdygMjXlAxQbnuMPdPOAo9ONsZzjaHmmTAVY2yPBwR0xeY7NHo11RUgICznB3ZyjChh/h4iiAcLYTUNc3Eocai7raKCHt4D42ztZgSRFTYXt7mIueiw5CQVxdWFsI5mLmpO6LtFMX9na1djayhyFsJG1NBJ3U9VxdYM4qXlBFBUOIEkpD0UjSFiYCwfBCbE187K11VeV5TYQkfW3AKj42KpK2uoqSXrpOgq46DkgxM2cnH5iInoaisYqLmbGooAb4r1EPqW4DlvS0VtVC/jW6Kto5OWNstLdWtP9rdPylC0PvaCIkgTT29cbI/cezhKYIRq++o5iOgyvK1ERbEMNvayKiLWhqoi6ImbfT1BfkVftLn44XUh1ji4epsddvvMo/5Ihj+ASthSVRMB95tKYByM7U2MbJ2kXvJ97eTNjIw0YRg0dgeFQhntbC3r4/Zf1Fg5EFQusiFH7b+9M3xto/bfGFgI2c1cAQJzNnFSFrVT07jG6UtRLSwcxB3VkLbOasCf7pY0M0FGzkaiZsL6jjoOyhZYeRK+xtDxPRVjYzNrX7y3e/eRWFPGFgFQeoCURUx0H+v+j5yfNHh6mzGULb2cxB0U5WVs7aThHphHSXveFlj0DDM62RNj5NOfZwqA0mtR7nGaLg7kB5O7gLOrbS1QeTSy7Av3ILJSDML4KpxfIwGNwVDVR2gSFtEC52sYV2vghXPnYbuK0TFA3P/j0dm8EjwJOniHRxgf9K0tg8RzjcFQh1QnjC37vDUa6YvIWHZKPQULQHKjgTIwre05njDEehoHbwdB2Nvy2KzoXAXZHuaKABMnbi0o8D7mLjisQkP4pDytyPw8PdiUOK43eKWwhYCED5XeBO/DAnpIcNxhh3OD8M6Wwh4P5LgoWAJ0gOJSsirySojpQw9gRraJuom0CdnAW5hVXAWtY+goJCit4GivKOCEUNtIuuKLewgpiDs4mblr23l68CEiEBUtd3cMNgIV52Zt5Cthg2D0yTFMF0IggRCMJaEyYuqAxS04PaKcrrCmMoYS4wBwlvDKBij7BXkhT3UoKLKblBnIRcTGFGqvq6XtwiSpjKEWDJx2HnjvRwxawGZgvELIKDj8MZ6m2FcQaHlJggSEJQMKBQEQqzhwMxPkW7I51iizHzQMy8rLCYuARIULDwr7hAoC52cFRslrUPGo7KVMKEJLbBwN6Dj11YjF0f7souLCgMYhcSkRIRlQIJsYO1DIp+SsQEHKgJd7FD28dmSgoKStb9lIZC/a0NKO/khPQC6rgj7BAusWk82YoqQIi8aWy5BExSBCoibgOHCgsKQQXFgcrGkIK/BRr4uMJj8xDOGCsFHFzhdtn6cHfMDhab9ydCGdrKmrETMn4cKI9fCq1s/9rVOKQE+Th+R84Kjfyf/PIfsqxQE4pCAzGbCcIWAbfB+MADjvEB6F988LNK/vRBMWY9v92r/ysjYzNU1QwyMO9BbLaQOEgIJJxpBHX3ia367QA+9v/yDvx6ix5hstkd89DGKXQ1fjUPIFdXUVMBYGEBAFiYH+BqEWtcTVHRSheio6Kmqfx7ohOAKTiCAIAzxs0QsAK7iakZO/78b46fBQpDuQL+c8FQfRv9TTsEBPzfC6ENHAXDjHuYxuWOUY4RSY2Bqe1+w7d/wta/YZmfsBfaFY2BdX/C7gYQRQxsg4Fv2/0TbP1PMMzV/Sd9BAaWwey2sD92A67DXQz1MSMBpjEDUAAIAAxQ+Ln+3yTIdABA4hgAwI75g7N+BQBUPQEA6Cf/4LjeAQBkjwGAyv4/uD8+cYW6Q3+hcDDtmq0tAHCQAwCQmAIAVF8AACLzvx3xH2xj/2UbGIDE/OwATgA4BqMGcAHAAPwYSBggCBACiF1NARQ
B2Neu/ayYgoOpuAS4uDg4uET4+HgE14muXycmIiYmIaUkJyGlICUmJqclp6CipqGhuU5GR09LTU9JTUP9UwgWNoYHB5cQF5eQmoSYhPr/XK6aABQEWFqAHmwsDsA1CixsCqyrTwAWzJKuYWNhYWH9I+64eDj4BNhY1wgx08bkAIxizALwiInxCQmwAb/IcXDxKPApb1AJUXPI6xm9JKChpeMUg7qhB4aERcQNDD0exZeWDyq4x75IK2slpL+pBNE3ti7h4lYODnncPBy3sIcRzIT1d/mTr9ewcXHw8DGT4hSYHMcG4OHjElzD/sckBQ7lDSG9WGp5CNStZABXJC6tmYqGU1jB+tECh37L3v7VJOA6NsY8CmwKgBzgR246xzUOdo7bdSsro824siC80hUjjTu5J92E2PBdr/smU0OvJmXnARar5ZvJtrmfCMUlhLL6bhRi26TOZ2R2FexLVJyY0Ur4sECagjbTJ0Ig06yT4RdPrwCGH3bMnm4mCMvNNJeORp7QoaTrXKQg1z8VuAkKJTj6MUrhVSYI4n/jH69gWrVrkQ24URg9VpsvsPKczNRuNHWz4GPCbVn4WzO/+NIf4HfHtgFqO0afz1hDRl4d35o/uYgM/f9EG23meFxaeq/bL5Jr95Z8Sd8Y1Xcs003GjOsdWxxC/daVvQw4tGpqTKH3fzzmI0OJ0o+1Lg1pjIvnfz3stroe0s3BL/1s9gHZrZQM/+Z8h8KXfbc+jn3XClfgk9YaJO7FMURfLESP88XWF0c2bfSg/KNwyMExwWV0hM+y3W5zcDwMCyMI+7sjKo5w2IqxoUOAFrujHRFXgOXn7JI/rOHo15z1oq/PY/MVCqCczbad9iwl3LKDCJBK5USFiOph9GPOQM/YZSvlj10W7Zdvyq4A4UxZdkrKhxmUu57pH8zyXbwYk94XHuk2pqdn27mX3h4fXJPt6ATgvDgASr72493nvUVYfFYp41CKZ36dj7r/XX72GjpnftpnYRzK5BpYo13REFMZGKQs797szqiRlZd+7pw6J+MhVquC0jJmiGweUBETWyMp92BR3ZQe4/ts3S/Cj7dvRVllXuqXIKqhCN6B1sDJgi1tUCyTmE8UvFqjDypNHQ113JOrgZUh71wuAluc6GeKztyKq/W+TM/x3mEAJlQbdMKkAjfOWUfe+Uw3geRnw3hHNzrq8XqjYFWbb6LIHITpkrY9dJX5evO8YW87v7THsWRL4MUQuq5gJz2sCKI4/Lzq6Naaz1G2T9n7SLu7cnrja2VtsnlZPiPHnqf6KBbIlXzD35OcOBnLszzXgqdZ4Mgzimgqf99vhqHEdqxmi6N66C4Y8ul0y+apKdd+8mrqJDsXKVBAipQbb8auNIgtMdwzQkyKqSrEohBYrWyxJesd5HA2Qe1Wy4m3ZPvGYoNV0QzN0m20ZiYn+b4aereLl9X0pqxvvUm+26uCp+mfFb536A+fWrhXVThi8UFdLlBDsLxZAoKw4GLFfJUAc6VesdQNtYXtB3NqGShY/FxWcchR+Vuecp5PkXuz3Po1dRwziPhSFvSM0kBC5Nmx5s7b22c0KQF0HzX7LlQC3/LxhYU1oikosCjI/+7u3X/Pc5s/onHq3d6zKTqoqeZQjoECIcrHPyWD+I1542PZpexmqvUqZX2lFcOtyDKGxadS36UjaUgV7WKXKjILH5V+kM0kZ2PqeP4txbsIEDKLZRh06LggdyHe9tFA/oJnxqO10Mu3n8RfILvanowk/dXRoQTp/NqH4Hmp06LxTV8Pk/uLNwF4NierIC8ygsi5yfWuHyNjRiPmeR0Rvr7yJWzReyKh3imLydvw5v1zrb6kFyRwlsrbWRExZj4Wo8FihGxCPHt5PxYdZHk+BlmSTdyqTo5WL54bU/ym9s3zPOiOELPRY7Pi56PnRUytn1c2Z17Okjgzs2Qq+ED55gRZwLYyPzycTqj0NgzPv68Hdt3tm0Ou3OR83nfPlT0yZJa26gog1kh1BWh
1Mj5WYmspuPs/6NGl9GM61QdF+Z26hTfscPehCgUp1Xa5DvKd+86Uozq0kDmRpWOyj156dgfg12S8Yw/iDsme9iV7e2/B7yk6JME/6lxaaruRlmNuI7JQ7otBViJT5VJJ51QAJzs6U8pUTSABIh0+u2hxeFD9si3Ps2dt5fBHZLSMey93HtGzvNSuIsPSU2WLkt2XH1+CROu2ukfLQf5Tu9wa6fyihqJ95dn7FLTyf6L+jw6v4du9GAnxl4mU+MGRW2fbZwrjGvfP2b12xV9ume/pnZiNzVT1Dzhx4SjdU1L6jx3j4QSeePOb/PfIy5cesq0dDuOO9/3Yj3dF4pHv9vSW5w4mKxl/Hiwc/213WzOa0yPl6Kj8ZUvtBoR5HfH5XAyvyqLf78jHTAz/nhoXgMxS/twvie3ibvtdnetJV4DHjckg361I+3bpVTriWh5wVU3oIq2c+euvQ1PTrfvEbIW93xwDaBuF+q6D5MrKJm6YNvvh0B+N+yc63gerrip7OXs/K9HsCD7tsb2F1Fpm/4pMOtoYczlnqh4y7fjB0JY0vCRsPXwsCmlT0+G2esywaBvqxNLZe+3HQO3YgXhjAw9r510qy8uYC+m3NPX6puuhSBYtUbQoZa26j1ngOjb97KTFoEyfsYV2HMbUJdp83PQh6+xK8zsZmTlhhr0DEfb2E19rU1/f6vbUTx9rl2x+2qOs+u1E8EKhYwsVkOznlxR4A3Zc09grBfSHvaG3HIC8DvqcIdNfMZFtvwvjzXKM7tFUCrpc3jUyZ9kkvmyocBXdP+2LUoUzxt1hRK9KeDDh+BDip+EEE664t3kuidTVukPZw8KD/xwnf3XBsV5bn5IaNdTGuvbmfgS/1PMKajEISpZrh4SPvP0MDu4MC8ccRNf4eHs3Q5grDKd34mPMt4djX9nl5caAmTVZdlDR92hhFqxf5j0Jxm/fLeBnefu4Gz/Gci7tCpBGeAXI5RV9sLe1O6GzFhq4e81wJ/+HzGTkYHU6xyHDvyUiLsfuqXr0Ifi0W0955gpAqPfD/grAoC5gFARkJQYKddR27Rpf2FGU1d14faMwKrtSyhG4N7FADp6oOBg2EC0KDEltGq3IgXfANCDmp5Ex09WWLa1JNl5+9+VkF7ccOwUS8kIrc/tz4rzXx88W7b0de3zRkST8QKYxIUuBN65l9IsylWfF39YMDcw/94oq0ejra9B7Y21TKn2rrmgiKU/VT/uArpgjv4w60mlz9oxgYysdaga7Jy/MG45JP8HmRBcAg6BA3rO0CVkTLOjWYrBndY+2e6S8mn//Ysn8aSTQedW+tO5yx5535+TVdhVRN1EQY2zUeyHV0+sRMt9hjGsE390PJV5zfx/5VK/IWjqJfEUDL4pbS566vQ5Pa1SzssTl6vWze6qe2MwQQ+fdu+YYNgfxkTenwvsmBxYgvdle/YzLR8QSHdAdhzdqqA6EaVG06RhyngfRad+4aJ4jWPjC+6Qu+NQjr9elQn+SeVI6YFB6+Jp6zbJ3gd10zwbzwUnsFUCSfd2JhuMV81I7EU/tXbZzDsENpFd+fGqXU9X0OklAN2ItrJf5qWoK75v7p2dp+LHL0fRRKyP0RpJaz5fKy6njE4lOAds4J1JnjrYyM4HnvG8WZVp9sWtgw65bzAVFXTybQLdhrdjBNmUPKtl+uIFQxVcYMeEKQAwcTU1y6DEbfDk/TvIilFbHgY96CDJ1rnj7uW9NnqpMIRKsfiJBIvoOxEKCJtajj4WayEidf9M/W5iraypLICNtvHz5w9trZw848r9NsEDh10Dz/NxdsVWwqCVIINgMkfl1CqQN9AJ7ifYCx8cHph6Nnj3GvansV0iZtFfXRKG6/sYKyCu2mkI5i3jafv5Gio3jGNHWfedTe2f7RNihI4NJrIKfc+CR8NaWgZVL6pRVw/Xa7yN0U4UOgeunGcMtzjRkcot13pbhhss+KnJu5RO5GjgVwjqql3QSou9Ey+go1ViaznRceVy
iOeqm7efzsj8V6Q/pMMGfm2yuh9/vIAnFnjrGIdfXPwG3sWbTUuv+0ZBTObaryN9YofxtI/c8YzFan7AipLX4I1wuleW8npiiii9Yv7tHpfZW6pqB4WBFyBpffMd3RoNE0mQGa/6UF/ozDjYBls5ADWlJaGwfimBlYh9sazZ2zWy+zzaOdEy6jNo87yZMpMXaP/JLBce/e5WuQ0LQ+RPIp/v0BA9UvXvPad3pCqBQDBm83qRo6Zbt+HVnz7eWKcCZMyNZ0mTxWy/vfU+Wxt7eyF7SyIFQFjn5R2dxoTNtcpcL2t9gh618Ima8G00exi0PxzrFAkl6oLKvQyMm0N8iafN5fh4VyzoHT/ai7y620ynwefGKkt6AO+ZvLRS8f1VBI/Kis1eoRDKrACHF3bS9s/EF+PkrrOlQm941eOT5mjix8+CGIyp3I6/whQYvizoIRoqOgK66TM9uu1c0+A/WBwxVK8d88tHIsdEVlIhhPulpB6EPE7G+GzVjDjdcCv/2+brD4ZZ9ZH1lqrqDY71gyQh1r1/SGl4iKHhwfYk71NmLfVyKyJJcoYyDOpFObwYWNbmR90S00bhncHIRJowIqZfBU6tkbbzB+SA36zSBfs3eKcIpkpQUqxX2IJisjwDHP81oyuiCNkJ4xeSsZFLdem2Wn4b5UMFPC6/bR2WcfmGvSPqauBSYqiqmLP1fzs+DvJhWLb6HJVkR/HiK2ZFkdw5wyC0iuney+gX77au0xxS5RaE3zSolt2NLhUItSmLwebv2m7kLhcpfUwY3m4wuVBqHTt30P4od1pB99agcpcGpuJxA8aN1dk2GJU6xUu2Wg8fPWFOLmb4ctKb1MjaoOOxcqyRyUMVbyUvp0GulP6jp6P9CUvMJRHmt+G3NhIplhLXBzmpkvNAc/5OcXfTw7uqjHHBaXFbJOmuUk4hPQrOhPxldfsmgkYUAZYd+7EwWjTEDlHtAqDmtIBi4HpZhT4KU7/GZvYeOmkn7nWX+7BLkKs3r8lZ6ONiZiQzFLBnL78uNdLjiRgxHBYnw617EiYUCLJnm+osZ+hxTR8vdyTnfmpwQewBng7Of1iYzTvIkzVw/GqF4xhkfxYiyGPowL88e8uX9tMKKbHf3kLGloltRTrKZ2avtcwrPkoYe1xWuhGrHdpe+qA4DKtW/D8s0JEOlbPQIkWPKwQwyl/N1SW4ejK1Y7o704AzI0GIxqadIONmKgBSH4oNfs2FQAJ/04GtwUcdxXj/l1qlzmz/l4TOrA2mD0fxMloi0+P2KbfZQoW3p8+yGQtc5rXInigGP9ZM6qm5jI2+RD6Q3YHwgb6dXKnxfsyMHB/zUEPlR4h8ULLAZNFX9fGAPphTRII/Fh0bNVn0CUB3cdW/er88+bx1IzxRpBKRGDJm5pFI200BftS5wSo9+cnZ13UUERn+eTf1u4pUlFm0ZvaUd7VcikzmSSqzOU9WYwLKhlTEG+z7W4WZMAtOI0rf/wiRVEHEFOEhhPbH2UcuXoarK76WVdcitsbCjHjboWYtTIPxyAABwf0oBfCHkeojFxdMuAC/8ODMVvwzKTihM6ymfXJYWW2cG8L9QCtFjP88hjUBQlKeonjmOJ+jZLcb/iNs1R6fv2d+3KR230m0V6/g+PbBrbclGe/c0ebuakNmcdcQiLz3vFk07FT0FBfTn56WJl0YbblRii4to+RH1RXNKoWZhZH1HtVPq4aoRh8Obez3KRieDrQ13yn2/Bpn5bUBnX07KesoVev3yqdlIKmHdzLepSPObNZY71An5aOaGQkun7af03+ZHD2U/YVWR4/X7YOlVu326UD+3tHRK6ykxr+jOXjL47qAhanPTuAw+Y0I6u/dF4w0WvtI2bF9srXX2zjzdV+3tFxMzip5ZHtIzC7EvmOrG95e8KpqL2HgVluYOglzd+1vqW3k+i0gXFkVtKIISwvVv5kWgQGxPcm37jardK4pYCAT+DgXZ+vN/isXQnAbpUfbAVsklYmDX89b
L5LJmhLJVlRs2fd1Ih4ZyzORmXqaUjZBh+1r92UN8uyPeayuWNHoSRUKLSU1h4QQtDUZSc3ZFeQ9w9+S5CvH7RuPk1vMk4rnQo0R3AKup2Td1ZPo2zzJR8qhpHYiaXEZdNe9dw33r/SPJjw/yvonuu48cqX58IKDQ/ll/sUGNS8n8198GPGD3lJZnRlxynhCXzhnaDKy7GjtShK8k4YFzTarggnRDdou4ZMxQriI3NerW55sVfTt6VY5dZWNWPAf3ZfgumJfeyod/M3LZLpMdYtVfb9BQ5xCQxMTWQCxhRiR8uQK7IqBmADKETxKWvP6K87N/W9uSXf3Tcp9rjTwkZUsJ0KNOOY8nAbUCW6m7BaIsiHIly1e/DiDY2NRG0cs4fcOWiXnvoaT9y4we/08exLdCxxh2kjxK07ONteQ4M/yLz3MSJMqSLnISHRc7FpCfzL0Os3+cbVpOWfvDuo1zHZWUV9MFeE5LvL8zz30w9GM8rXH4zmX1gSb033F0GXbT/dEDvq9GSGNjVNdNJmxJuCNQNYopRQ9TWDwO1os8F5CspeF2uEiGVB+H5+mpcbVFRKaFx+kOyROXH+JSpaBkMCbTHHeVMRTrA8kwpOFAfurCPK0lP3bcvP60U4kJi5XobKvn4+7NTgct14JnZZ/MGLrHEkEJ9XGiX+5Ibio/auxU23fI771Fvh8SZqhrCv/YNl+FN82Zm/2subSi+LptluOb5q5CVrcc8vo7p3MBqekJAy8WhqBDNrS1tfncml5iI5K7z1lHBtQZ25Pg12Xmry0s1rWP61Wl4sCZLarvb1WHm9qmoU+3VeTk1aKlTDNCq1CW3BpzvCYaVcy+DxgBLC9my5ynb/eVXuQzVq8df7sJgRl46A4Kt9j6bfHcejhU6+OnUq6CA/8O86mtP5259naZfZ9fYlaoiN7VbIKmcOK+XvwEg4hd7cwTTi5/XaUNyRy/+1a8PSFaA/xDzzg10oCHOvryWqJaQhm5cTcbzdz3fnQtXzPtkWJhiYwK1XibJwF8Kx1My8o6rDtfQyPbGKeBjrP0584og5l1m4WdmSx2JWOHPzzYil9w6SFjy28TfI2b0LjCvVg7I4/LdUJY1Q/5psK4Qdol7YZzzHW8qV6+VqBdzEobSOrW/67tnHfCe69sDxUVgCcwYbRb85b+IH7TTI/CpxNz2L6buitpy6YZcmJieKbr/Ja5jCxb/ryAd2cgyCr0Iu39zJ2K/gvFWviZQKkxY+uN5+mJtMEj3LPGU8hBXQ0JKnmGOHlm/z6KxIQwn6YgcSHXMZ51XKX3TTuU+4OhICp9Ygv6GeWUL4nSVSJqhtcLXZnr69+USFD1NsQOdYsbwBM+lO63r0Xq2Rx70G8HtO/e9Z1GVObMKSu5dXGDuJ/iN94kWJx6Xd25bnGCYtmwLKi3brBg713n2b+d695jod8FLEn0lEpOv5NkxRyZ50fPOCOm7WJL4s7643mNXlP4xcjx0PPP9jYXKjmfJ6+XOkHA14VqQj+d7yx5hk6vRzMEesc2Ma0iPBdlLUU9KmKWr7V/V3fIUV3PBgmrnt3Is0eqZq46+kOW0tVYQG4/dHkkP5ZKZcjMh9KYIr+WfH3NnJUdM7x00m6s2CVyAW41JECs7nM5vn6n0mg4vPW6KLAj4iMwPGxxWy9yYirRBOG8L29HWStFuPH2Rph3D9K+g1DMKpviHTZpZS1Rk8vTLgWkc6bzUgnyyXN8yeFFS++ZV96kK7PHJRV5qFGRUTniQI6pjyniJ/2mxGxPPlI4z/W9Pl8rtDyhWmI1FICMhoX3Kt3/9x2PV2s1coxJ2YOTgCyVkAPnjDgZfAXgSXtmBNaDZjxmVU+2N8VVIl0/tXG8h2Im05X/EZsuRd5AzOSs6VUMgVLQfvl1F/JnszNO6dvQss8M0DJIl5j6Qnoi2/HO0Uyq5lMMtkoXF9Zw7nXKMdkX10t1R0TKcoOjbmqwnnT+0Kvc/ZwxFMTRO5Ng1wjrDjDOlQo
L78NswDR4E/ufBb/bsNW2FHc/lXQ2bdvhCZmQWl7Pyw3ve5xWIiyOl7poZiImfTTv9/wcbTXGMmfiIDV0O6xO+NenIl8g7YVyVZAUcjc1kThSNC/+WBQlRmA6u+ajE4M1q3aANXxGa0y2PTT5YQsuq+nyNBCsepoy23JCd/D2JQqhUcenE+Gjfsx3O6x+mBq4RNRxwPhID10y5SLyqJs7LzMK/Gh16pGUvmqFloeeZLbbzRXz2eLrR9pd3xsFFnR860UGpG9tnn58YqbnuUFXh+ozFmkzOslvlaUqDSwOP9X/Q2v0UjvWr0pmuWffsGFzr1/p69v8xR/Uf0gNs6gy72/3S4rI6HoWMDGXGFRmzzyo1DUQ9y/Th4CZphovycXEcijoePLzanr1X85ycDRhlv+pZtIeaZCdvADWsdRW9YLrU9nYdrgN4DjD00tEqJJokH8slP2HVhqE8twvXRb9NP+0GgMXqr1duurBlZvFrBWGSe68jykY3i8G2THBh0xmQ7Ww37HqCE9rWLk6vVBL/Lp7eavG5wowe5E7KOC59uZV6p3o/16V0i+3aPZT41YB2vlGnPLedTgbGGpK23zRU+moWnVlIgqgcM+rzX8zJikSS0FL/t/fwuEWe4MIMj9/m9U5Sl1Z29pruDRqMTsMAa5kfr6AzKzQcY593mtYF2P8jxdxv7qxeiYABP4dEJR5wr99BWg4M/hIw28qmhjqdf6USUvZ/AUEUuvLY/4YGcfwZo7+Q0WU4zOU4sOh0AhwTw3bON3XkEUrRhHN5LgF+w9fFoePqaZ3DEFHLyqDiFI+VdJPb4SbiTHvFN6Kf4y0oWZp/9f7qH+6mWK70719GvBhdJp1B7KjmvasflDzcHqU7M66jiUAi+shoZnvftt09jZnPeFwJWPQcqlEm9IV4Cx1l+4ID32Yeoz7wqbU1ofGcKR/LcsxRUehd7vgobRF64oP8/IZ+PZWN7Xv5WjI1zoj++fF+oUG+zMs4zfdSvhR9Z/eRnrAlqlfvrVO3SkT0OCVhbz4aqAgYJxhfwXYzFmIVhp9+bGu4PNgd79B5jmNSbPziD8oeINUxCN5gKWvUiPSHJqVJZu5OyZSbx8Vqh3JaH8AyKSQEov+kRdIfySaJbCzJXxUWjZy526b7Izt/QQj36xb43keu8CMD7NfaWvD1gKUN9PVfb2UvpiMCd9S1+hif9TdIoU9ZiWVDAEFMNpaA4fKvnWkFd7plHj2Sl+2mIEqzyco1lAZC9YYqCJ4BZAnugI83n9S5TsOTJy03z4uI+90eOcGOqw/ES82mfZ7yBWMxq68ZxYbePNJPHnjHRM2pmbH5IUvIbN0/UETk6LfPTw4Xi1RRANXvWuzaKhCzgzOaCWBQHS735NSK7w445RaI3IWpv43CakQ/yJYdPz9sXmBiby27kLjRmol5aZ/uRzwNhYDfxkWY2tMmmY43Eo/UbZ2k2ROhKKsYFU1dSug/S07ux2FPn3JbN6bTLHDQ+7uU3lZ/kfCZDVAt1W01Q3oLcXLVP21EXHOkmmBu2TzDCQeoWWra3u3EcvbXoUozwE5s4Ex/sb13QDRS/XXwzPp/FNTlPFIzULo1+fVY+bVfmzBI0SMajsGL08Yq0ITFFXPALH9yyjLrmmW7tVUiO8T6R1O9rEForPDU5aPV4CCTKfxBg+KXPgYZ1Hnhl1EO8KFergwAns2kIiWqmRIKXasBlVT6Pksg4pha2IzEvclVJFmYY3cYXuC5LmZCGwlYSBakUtPWGEP9h5lqSfAG+Y4VVKETFTveDl5uS0i1RoKSVavrb+7ZLAO4w96bNCyHgGppvRZnpqWjOqag1ThshJHTm3HG01a+I5oueQzvs98+lxMUfxwnCknMCYzxSkjwN+wL/PRSInKOixXqLyzXSJ4NdTxAeVoHL5i2qoVyD4p8vUtu/1KVBQsxa2OlU35xZ772oP3wZUbVqCD0VGiDKCyRuT1rN1EF666N8fj+2qcOe4bohYyMh2
5b0W1Knudyy3Z+k53DNDkhAvxvSg/o7gWyfKl4ZCGGE9CYtwk7Xtj5ansc6xYE4iMqvWLlDuJ5mKbJVSMdaewx2CLE+LnPPYtwgD5k5ZGthdNmD3jEu6aIE9FnFI2DXDUeLPoCwBsqkkBSDPjj/UNRhqFPm9wWruyJS0zyDN9bZMll/VFoQy8/GtHf7hWcWTvFVO8pm7vKnwA72vUN4Q0yX/twiqMhl0mVLzbyhEXYtlc/pjqp7ItfpS3leHZuULBprKM+Xp/91+2j7vn0HddzoO604gpPe0D0miFMdI1UFSh6wHFiIP5RxwBA3c2RJFBFVHQcgeK/6lRdIKKaPPcDxLzXLqZDETVkWXC5kRrrvsX/CcMMVcAvcq3l+eNvbKbIgKfnBsbd8vML+kyTkZftEgG2faRYO3qvQrkdD4+CUj2G7a1CpsKCJizrfp+a85Mh2Dlbvb/goCYjABFOM24vg0qlN0NsInmXLUPLz56EthSraUcPiK8YAnZahgJmV/xk5Z4N6cAi7YDW0q+sdjZI8AJ8Vc+tPqW/SIwRe2N0t4VoEakSUl5OeNq4v8BmBaYaQ== \ No newline at end of file diff --git a/docs/cassettes/streaming-from-final-node_55d60dfa-96e3-442f-9924-0c99f46baed8.msgpack.zlib b/docs/cassettes/streaming-from-final-node_55d60dfa-96e3-442f-9924-0c99f46baed8.msgpack.zlib deleted file mode 100644 index b0dd754569..0000000000 --- a/docs/cassettes/streaming-from-final-node_55d60dfa-96e3-442f-9924-0c99f46baed8.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ -eNrtWktv29gV7nTXrLrorhuWKDBAIcqk+BDlIChky3bkp2TJseVMIFySlxIlvsx7qYcDo2haoGuiv6CJY0+NNJPBBG067XTdRf+AZzHob+iuux5Scizn5aB1gDqUFhbJe+655zvn3HMuP+vBcQ8HxPLcT55YLsUB0inckN8+OA7wXogJ/fWRg2nbMw4rG7X6ozCwTn/WptQnszMzyLeyno9dZGV1z5npCTN6G9EZuPZtnKg51Dxj+O0nv7nPOpgQ1MKEnWXu3md1D9ZyKdywfZjyKWFoGzN9jOArYCyXcYf6z9kMwwaejWOpkOCAPbgHTxzPwHb8qOVTTszKHA0DzYtlXXgqwDehAUYO3NAgxHBPseMDMJCLVfFZPn7mefbYFjr0kyXM0E2wx6peXs8y91kXOYlAC9Pm2MRYxsBEDyx/LMZuEQwgLEDiMSA5Acb0AgfFYtl4mo8C0AeeJolyPwAPBtTCo1vdosPkArthDOEuC56IpxGTjdGfGQsYLbfFHhzEPoJAWQE2EvFEwaSkp3WwTkHy4N7BcRsjA1b+7ns/PGx7hEZPL4bwC6TrGNyKXd0zQH/0h9a+5WcYA5s2ovgEwubixDHRSRdjn0O21cNHo1nRM+T7tqUnUGc6xHOfjMPMxba8PnwSB5WDpHBp9HwDjCiWZypDyDWXEbKSmuWfDThCkeXakDucjcCeIz8Z/8vkgI/0LijhxnkcHY0mP52U8Uj0eA3pG7ULKlGgt6PHKHAU6avJ50HoUsvB0fF85fXlxoPny4lZQcgWvrygmAxdPXpsIpvgP12YjGkw5HQPdES/4490z+taODr9V7Opm03NubWxopWy2lq50ayJxTl5tYHcrlMytvbWC+t9c7Ho7LclZbts7NMFTsjn8qKcz0s5TsjyWSErcKVKg/dWBJfPubK91+gYQmPRF+qblaXNinQHF9b3SbfQX13MLdkrqGfptaWettE2t9aELam/v1u3wtvDUK5VVsOdOVc37L1yHWvVmwxYF
/Ys41YtWF0o7bbi7FjcExtyc4nsbJd3CvmmbbQGSw5uSEFDGy66g+6EeTlF5fixhQovqXz8eXqWGzZ2W7QdPRILwucBJj5UDfyrI3AZDcmDQ1gJ/+Pvx+Pq8XBj5TyFf3RYgpyMvqm3wwyTU5ga9pkcn5MYQZwV5VlJYpbW6k/mx8vU4xQ8ZSge0Bnci5+MysRNBmpWQDC9FVKTU7+sB8glJuTlwtkeONbbodvFxsn8G7P/mzj7IbQxHqhWHB74HsHc2MzoyQ63OSqkXLn01WircV7QQq61n2yF6PdxZoMRlvt8PAw1IVYJi3MOiR4JqvJ0PHKWdCcAlOcEnuOFP8elQIc9FhvuewEAwzqUaTqMTjMOGsQb7JYoyKICXr8J5Ui3QwPXQq3kObAmucn4AbY9ZHw94KBIYttyLIhK8nfcAmDzCHHMXrwuQb0udkn0ucyPPn+bFAlwvEIM46WiwwJ8/vpmoTNdUixTyMtfXxSDGJ3reaQ45MXr42MVD3nyZHAmzFlGdPpTuGliyTQkVcsZ+ZyMhbwsaRKvYplHEtI1U1O/mF/k5pHexlwtSb/ouNRYL66V5/+4w03mEbeRVH0Ydz3iWqZ5VMMBhCY60W0vNKBWBvgIdG0WG9FzVS+ISFQNCZaQcmKBW9jeTNriL49GRfzbH/zCQBTFRd+CGs7GPVSHDsoV56y+1dc6BaXbu9Op+pbsrJSD7VauR9drbOasso9mZM+7bjZJWBDQIcFp3Bde7kUpc9Y/L7ZPyKOcDDPIkEC3bJpgFg58sA7Uu6Ftg662Z+lxk4KOabkGHrCzfAZ6oE0RO3t/3KdZBFkLWxemZc57/EhB3HGbOrLtV3WMQMNAU11eUTqeYohztlEoLDrra5Zaqe4KoGzUzSZ69ESLPuvQFxs0i4JW6IABsB4L7e9eBhqlGRJkjyw6yLC214KtppEzEwG2RdpN8BqJ9SZS9w5u3Lj+4Xmr8ye9OOmw+58lPps66V1OSs5aUy9d4qXP2NlpMl3uJnfqostcBG9CUx9dutsO0uaky7FOunEC893SxvrCvRs3rpLr+P6zK+I6Mszk1ATWucjEOYu5kCNvIzTi4DKXnbLej/eYyDYmPifEXRBqPPNZTFW8TL8L1rNlyjhWq00ZDTPJGXk4Bj6JO8ZxAU/zfayekkJTUmhKCn2kpJCiqldLCskfDSkk5FNICinqlZNCpmmKIp83eV2DxsIrOTFnFhTRlCRZ0hQDf0BSqMCrqoHfQgr9+x3nslZxUJO7hW5lu7fs5ne8ku5sWNs9dI1IIZb9IHzMNfHMuRvqbcymFPrZaSi1+C03tdDXG/OpxZ68CaUWvZbeejd6800r/GyqgL8HKUWo539gOur0n+9gozZxH94IxgSF5Sa0VC/GxHgmU7SZTTg3B5MEzcgXr1JT8RFmks6C0v4q2ZP9YD/lmbIX/+/shap0alVtszrP7wpZs6cKi3WpONfG1Q5GneZuZ2dJodlde52W++f0gPiSvchWct253KDf77XcnUrV4ddIdV9abYSFcmdv1S4oeqE6rK4Uw/zOHdrp7w/yy+pC3hY9qy42avXmxubS1ry3WRouN8K8r9BSf7+ebZ2zF/3lzWbNXt6CzTDc1/vFpdUFrex0q6ZQkaWsbuB5l6z1VlwNbU2YJ0rKe7AXuZw0ZS/exF48VFJJXohXTl4g2dAEQ9TziqyIOVORRF4yNdPM4YIsS5rxIckLUS2IWj6nvYG8+PGLt/fvjjlQ+3m+Oqzkt/U5tRNWu7t3bq8Xxf+uf8sfD3lxXTxz7obb+IpOs9cPOmN6dpekFX0mtWG34XxD0ove6uLUgu+nFvqnvfRGveXR1GInnpPewF8lW3cdY9+Nf36SVvRX9a+ZawidttO75+esVmqxF32wI71Z7xkotcU+m9qwr2Dsp7jSeyS9Oz50tLRiD7Bto/Qe7trITfHBvhMSOj3Xp/B9FqW32P8kVcD/h9+e/AcxwUIS \ No 
newline at end of file diff --git a/docs/cassettes/streaming-from-final-node_68ac2c7f.msgpack.zlib b/docs/cassettes/streaming-from-final-node_68ac2c7f.msgpack.zlib deleted file mode 100644 index e34a76cf79..0000000000 --- a/docs/cassettes/streaming-from-final-node_68ac2c7f.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ -eNrtXM1z28YVj9tLx6ce2juM6UxmOgIFECBISKODPhlZH5REKhYVezgLYEGCBBYQdiGS8ujQtNNrh23/gfpDSjWuk0wybZo2PefQf0A+5A/osdde+gCSFhXbsZPQkyhLacYkgLdv3+997Hv7sPK7p4c4om5Arj1yCcMRshhc0D+8exrhgxhT9psTH7NGYD/YKpUr9+PIPf9lg7GQzkxPo9DNBCEmyM1YgT99qExbDcSm4Xvo4ZTNAzOwu0+u/fau6GNKUR1TcUZ4565oBTAXYXAhtmGI4FKBNbDQxgg+IsElAnXEKUGMAg8nRDHFkXh8B+74gY295FY9ZJKayUksjswgoSVwV4FPyiKMfLhgUYzhmmE/BFxAl7CSM3JyLwi8gSisG6ZTODFJoSesnn6fEe6KBPkpQR2z2kDAhMbG1IrccEAm7lIMEBIcgQCUI1CcIPJRQpZJhoUoAn6gaJoyDyNQYMRc3L+0XNZNv2ASJxDeEUnXSoaBOhL0Q2EBo0vq4vFxoiOwkxthOyVPGYxSBmYTWwwoj+8cnzYwsmHm3z1oBJT1Hl+23/vIsjAoFRMrsIF77y/1IzecEmzseIjhM7AZwalaemctjEMJee4hPumP6n2AwtBzrRTodJMG5NHAxlIiybOPzxKTSuARhPU+LoEQ86vTW11wNCIoGa2QkT/oSJQhl3jgOJKHQJ6TMH3+j9EHIbJawEQaOHHvpD/48ShNQHsPN5BVKl9iiSKr0XuIIl/XPhq9H8WEuT7unS5uPTvd4OHFdGpGUTLGh5cY0y6xeg8d5FH8t0uDMYu6khUAj96f5MdD/XiY1Fmjd1815PciTEMIG/zrExjGYvruA7AF/vfnp4PwuVdaGxrxizd+/mAJ7NL7rNKIp4SsLpRxKGTlrCYo6oyam9GyQnGj8mhxME0lMcO5wHCHTePD5E4/UGYFCNqIYjYXM0cqfFiJEKEO2GZ56AenViMmLWyfLT7XAz5LPADgJXggXiXcCQOKpYGYvUd70k5/JZFWlz7qu5sURHVE3KPUHXp/TqwLQrjk48FjiIqEJUwu+bR3P6uqjwdPhoo/A6CypMiSrPw9CQYL/CwRPAwiAIYtWKdYt3c+5aNO4mRzqpJTdVmWZyEgLS+2cTk2lwIf5qSzQhhhL0D2px0Jlgnsub4LVkn/HayB4EAKDJY/eZaCBS1MaO+9nNz/+dcoSYSTGRIYTxk9MODnn88nGvLSEhojn/v0MhnY6ILPfd2nnzz7fMDinkwfdYbEkmv3zn8BF7VcwdLzqqKqsoKQAa7iFFTF0HJIyauWaRnvL65Ii8hqYKmcul/vdKm6Ob+xunhWBt6LQdBy8e+fXPtxrWY5NdOfK62ZSxlzY7VaK6vzC7n1KiItf8nePdg0NtvOyrx/1ND0W6v2EVuWlHw2r+byeS0rKRk5o2QUaWmrKgdrCpGzJOcdVJu2Ul0JlcrOVnFnS3sbG5tHtGW011eyRW8NHbpWuXholhrO7oayq7WP9itu/FY3zpW31uO9BWLZ3sFqBZvbYE9YeuemZwXwRFgY6dwgQCQIECkJD21GHobHrGCnXjCXubwYzgpvQZIrEa87C3EF7oThExbussvw3GZA8PkfQQfxoWvPlaP15aX9ehItKwdqNVcr0r1bq3tGvubZ9U7Rx1UtqprdFdJpjSghqxckeaAH
XdYKqfNciP4NpfrrnjQa71IpzU9gRxJQ4jrOSRlHEEK9M8sLYhvW9QifgM135qu9jwuWoSI1jzUEv1nVkJZv7aT5+1cn/XTz5Ce3bcRQkp5cyDZikuwtSPXS/ILbduK37HYzXNhcILRUXttraoX9Tv4gr4pTwxzUH5G5KA8y6cICBBYsRCzJYE/1o0wNM/3lRA/xns3BCNqlkNdrDoiFoxCkA/Yk9jzg1QhcK0mnkNtdYuOOOCNPQbb2GBJn7g4qChHB6gJLLAybuihG+gyS2qBmIc/7Mo8+aHhQK66rHRMtI8tUaInteGtLh6ps5JeBWT/vjlQTI8XEsJa4XEqIKKrHPggA84mQqO9MQUp3Yoq8vkTHU6IX1GFJNOlQRIDt0kYNtEYTvinVnePr16++eV6o/FEtjirs7u1UZxMlfZWS0qpwoqWXaOm2ODNxpperCbYiEx291JWOeVPSy7GOqnEE8ztLpc3lO9evj7Pj8KNH4+k4TAmjI1NUFyQjNYRwyUVe1FZIbCu8rIJ4te7DiLMJSQ5MVnhYv4TbEJ5Pne+S8OIqe5MKyGujLhVoTEj3WdAJiEtgaq8i8qQvM6a+zBdv/HTSmfnuOzMnVrrR7Z3/93u+z30NO9BnulJ6Qft6XamfcdOVyikcdqX0wti7UpqFdaug5bKWg/Wcqed0I6vmVd1BBRmjPH5hV+rbdzsKio3z6vO7Hdf/9+KarM6K2rqpkVbR3vWLR3Jze7kY+2V7+ZvVZNnvotshiq+j0XBVNHOhhkoDi5xCH1ZD3OJ3CbfQy4hf7CuQlC2XWgG/jk+5hd7fAnMLP93684r+BlfAX6ElR1kQvuZm3Pl/vqIXt4PbsCcatGhckvbkDhNMQuAI856wAzuHaLRF1dfFlztzSRE32suD5HaxyCe9vtG+143XdrJocsLme33CJqtq4z1ho/5Aehn3CgUeWxny2FsZhqbLmqGrctbOmma+kDMKumxi3UB5U7U15esesCnozfK2ubO9KO8rGeewoKxUtPmFBt5uYtSs7Tf3ijrL7HubbLV90dlTnzYeM1vZ1kK2024f1sne1rYvb9DtI229GhurzYN1z9AtY7u7vTYf5/feZs32USd/s7Cc99TArajVcqVW2inuLgY7S92b1Tgf6mypfVTJ1F/1gI06pgM27Zs7tbJ3cxdyRffIas8X15fNVb+17ShbOS1j2XiR0I3DNWKi3RElqJr+vTxgU8g5hmPks+ZzWk7GVxywacjxTc3Yrqxsvx3v69pKCRXr3cbRXvmb1RzqD6fldFU0c6GGW9jzRE6xC07gtSiv6Ke4NTsIwS12H3MLnfG81HWDeLLQcefxDX6jfazvk64g/nG9T7qC0Mf2PukKYh/z+6Sr6PjcVvRjfZ90BeGP8X3S1UN/g1uzV0AaXsG/yW+0R269wSZ7mslGlh/sFseFLe6E6V8p8Gp6DyN+N7O0lfz5C7dlPbG5xQ5SEMZtUS8EDs+7OdpwCccdzAYmkzKPwz1dhCdd68nLGp6wL7oc53izO/F5DrEvIG7tnuH31SS/7+XqETJ5LmcjnndydQ9ROmnh8Bn4HB+3hEltjss7frMdJs2AX8PTgONzxiZGMXOd2JucvZx0sSZnLydnLydn0H6QwL/F/2nwf6qPfN4= \ No newline at end of file diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 217fcb07da..9b07bfad69 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -83,9 +83,9 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( - [How to stream graph outputs](streaming.ipynb) 
- [How to stream LLM tokens](streaming-tokens.ipynb) +- [How to stream LLM tokens from specific nodes](streaming-specific-nodes.ipynb) - [How to stream events from within a tool](streaming-events-from-within-tools.ipynb) - [How to stream events from within a tool without LangChain models](streaming-events-from-within-tools-without-langchain.ipynb) -- [How to stream events from the final node](streaming-from-final-node.ipynb) - [How to stream from subgraphs](streaming-subgraphs.ipynb) - [How to disable streaming for models that don't support it](disable-streaming.ipynb) diff --git a/docs/docs/how-tos/streaming-from-final-node.ipynb b/docs/docs/how-tos/streaming-from-final-node.ipynb deleted file mode 100644 index aef5d346a5..0000000000 --- a/docs/docs/how-tos/streaming-from-final-node.ipynb +++ /dev/null @@ -1,351 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "15c4bd28", - "metadata": {}, - "source": [ - "# How to stream from the final node" - ] - }, - { - "cell_type": "markdown", - "id": "964686a6-8fed-4360-84d2-958c48186008", - "metadata": {}, - "source": [ - "
\n", - "

Prerequisites

\n", - "

\n", - " This guide assumes familiarity with the following:\n", - "

\n", - "

\n", - "
\n", - "\n", - "A common use case when streaming from an agent is to stream LLM tokens from inside the final node. This guide demonstrates how you can do this.\n", - "\n", - "## Setup\n", - "\n", - "First let's install our required packages and set our API keys" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "c04a3f8e-0bc9-430b-85db-3edfa026d2cd", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph langchain-openai langchain-community" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "c87e4a47-4099-4d1a-907c-a99fa857165a", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "\n", - "def _set_env(var: str):\n", - " if not os.environ.get(var):\n", - " os.environ[var] = getpass.getpass(f\"{var}: \")\n", - "\n", - "\n", - "_set_env(\"OPENAI_API_KEY\")" - ] - }, - { - "cell_type": "markdown", - "id": "eb79e50b", - "metadata": {}, - "source": [ - "
\n", - "

Set up LangSmith for LangGraph development

\n", - "

\n", - " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", - "

\n", - "
" - ] - }, - { - "cell_type": "markdown", - "id": "17f994ca-28e7-4379-a1c9-8c1682773b5f", - "metadata": {}, - "source": [ - "## Define model and tools" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "5e62618d-0e0c-483c-acd3-40a26e61894a", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Literal\n", - "from langchain_community.tools.tavily_search import TavilySearchResults\n", - "from langchain_core.runnables import ConfigurableField\n", - "from langchain_core.tools import tool\n", - "from langchain_openai import ChatOpenAI\n", - "from langgraph.prebuilt import create_react_agent\n", - "from langgraph.prebuilt import ToolNode\n", - "\n", - "\n", - "@tool\n", - "def get_weather(city: Literal[\"nyc\", \"sf\"]):\n", - " \"\"\"Use this to get weather information.\"\"\"\n", - " if city == \"nyc\":\n", - " return \"It might be cloudy in nyc\"\n", - " elif city == \"sf\":\n", - " return \"It's always sunny in sf\"\n", - " else:\n", - " raise AssertionError(\"Unknown city\")\n", - "\n", - "\n", - "tools = [get_weather]\n", - "model = ChatOpenAI(model_name=\"gpt-3.5-turbo\", temperature=0)\n", - "final_model = ChatOpenAI(model_name=\"gpt-3.5-turbo\", temperature=0)\n", - "\n", - "model = model.bind_tools(tools)\n", - "# NOTE: this is where we're adding a tag that we'll can use later to filter the model stream events to only the model called in the final node.\n", - "# This is not necessary if you call a single LLM but might be important in case you call multiple models within the node and want to filter events\n", - "# from only one of them.\n", - "final_model = final_model.with_config(tags=[\"final_node\"])\n", - "tool_node = ToolNode(tools=tools)" - ] - }, - { - "cell_type": "markdown", - "id": "9acef997-5dd6-4108-baf1-c4d6be3e4999", - "metadata": {}, - "source": [ - "## Define graph" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "8c7339d2-1835-4b5a-a99c-a60e150280af", - "metadata": {}, - "outputs": [], - 
"source": [ - "from typing import Annotated\n", - "from typing_extensions import TypedDict\n", - "\n", - "from langgraph.graph import END, StateGraph, START\n", - "from langgraph.graph.message import MessagesState\n", - "from langchain_core.messages import BaseMessage, SystemMessage, HumanMessage\n", - "\n", - "\n", - "def should_continue(state: MessagesState) -> Literal[\"tools\", \"final\"]:\n", - " messages = state[\"messages\"]\n", - " last_message = messages[-1]\n", - " # If the LLM makes a tool call, then we route to the \"tools\" node\n", - " if last_message.tool_calls:\n", - " return \"tools\"\n", - " # Otherwise, we stop (reply to the user)\n", - " return \"final\"\n", - "\n", - "\n", - "def call_model(state: MessagesState):\n", - " messages = state[\"messages\"]\n", - " response = model.invoke(messages)\n", - " # We return a list, because this will get added to the existing list\n", - " return {\"messages\": [response]}\n", - "\n", - "\n", - "def call_final_model(state: MessagesState):\n", - " messages = state[\"messages\"]\n", - " last_ai_message = messages[-1]\n", - " response = final_model.invoke(\n", - " [\n", - " SystemMessage(\"Rewrite this in the voice of Al Roker\"),\n", - " HumanMessage(last_ai_message.content),\n", - " ]\n", - " )\n", - " # overwrite the last AI message from the agent\n", - " response.id = last_ai_message.id\n", - " return {\"messages\": [response]}\n", - "\n", - "\n", - "builder = StateGraph(MessagesState)\n", - "\n", - "builder.add_node(\"agent\", call_model)\n", - "builder.add_node(\"tools\", tool_node)\n", - "# add a separate final node\n", - "builder.add_node(\"final\", call_final_model)\n", - "\n", - "builder.add_edge(START, \"agent\")\n", - "builder.add_conditional_edges(\n", - " \"agent\",\n", - " should_continue,\n", - ")\n", - "\n", - "builder.add_edge(\"tools\", \"agent\")\n", - "builder.add_edge(\"final\", END)\n", - "\n", - "graph = builder.compile()" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - 
"id": "2ab6d079-ba06-48ba-abe5-e72df24407af", - "metadata": {}, - "outputs": [ - { - "data": { - "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAFNAMwDASIAAhEBAxEB/8QAHQABAAIDAQEBAQAAAAAAAAAAAAUGBAcIAwECCf/EAFcQAAEDBAADAgYLCwcJCAMAAAEAAgMEBQYRBxIhEzEUIkFRVpQIFRYXIzZhcXTR0zIzN1RVdYGVsrTSQnKRk6GzwwkYJkRSU1disSQlRYKDhMHUkuHw/8QAGwEBAQEBAQEBAQAAAAAAAAAAAAECAwUEBgf/xAA3EQEAAQMABgcFCAIDAAAAAAAAAQIDEQQSITFRkRRBUmFxsdEFM5KhwRMVIzJCYoHhIlPC8PH/2gAMAwEAAhEDEQA/AP6poiICIiAiKLvl5dbBBT00BrLlVkspqffKCQNl73deWNvTmdo94ABc5rTqmmapxAk3ODGlziGtA2Se4KOkyazxOLX3WhY4eR1SwH/qotmDUtwe2ov8hv8AVbDuWobqljI/3cGy1o33F3M7u246UgzErHEwMZZrexo6BraWMAf2Lti1G+Znwj19Ia2P17qrL+WKD1ln1p7qrL+WKD1ln1p7lbL+R6D1Zn1J7lbL+R6D1Zn1J+D3/I2Huqsv5YoPWWfWnuqsv5YoPWWfWnuVsv5HoPVmfUnuVsv5HoPVmfUn4Pf8jYe6qy/lig9ZZ9ae6qy/lig9ZZ9ae5Wy/keg9WZ9Se5Wy/keg9WZ9Sfg9/yNj1pr9bKyQMp7jSTvJ0GxztcT+gFZ6hqjDMfrIjHPYrbNGQQWSUkbh17+hCwPczUY18Pj00vYN6vs88pfDIPKIi47id5gDyedo3zBq2qtlM4nv9f6TYtCLDtN0gvNBFV0/OGP2CyRvK+NwOnMc3yOBBBHkIKzFwmJpnEoIiKAiIgIiICIiAiIgIiICq+MauuQZBdpNOLKj2tp/wDkiiA5/mJldJsjvDWb+5GrQqxhI8FmyGgdsSU90mk0RrbZtTNI84+EI352keRfRb/JXMb9nLP/AIsbpWdERfOjEu92orBaq253GpiorfRQvqKipmdysijY0uc9x8gABJPyLTmbeyvxKycJr/m2PGqyBlsMLBTP
oKum53Sn4Nx54dhhG3B+uU61vZC2fn9FQXLBchpLpaqi+22e3zx1NrpGc01XGY3B0UY2NucNtHUdSOo71yzLYs8zLgbxXxK3W7J7ljVLQ0fuYbllD4JdZHMd2k9Lpwa6VrBGwMe9uyXa5n62g6Bu/H7CbBjdrvlxr7hR0NzfKykZLZa4VMhjOn7p+x7VoHncwDRB3ogr1unHvAbNjmO36pyOD2oyF5jtdVBFLM2qeGOfyNDGkh2mOAaQCXDlA5iAtX8R88vma3HDqyKz8RLNgU7Kttzp7La6mluzqxoi8HZK1gE0cBDpfHYQC5oBdrSpXCrBL/R2jgrbq/F73RmxZte56yK40z5HUsT462SGWSTxmuae2iAkDi0vOg4lBt1nsn8fl4t2jDY6G7eDXK0tuEVc6z14f2r52RRxuiMG42acXOkeQ1p0HcpW51o/Oai4YT7JKx5ZJj16vFircZmshqLLQvrHU9T4XHK3tWsBLGFu/HPTY6reCAiIgq9Dq0Z9W0TNNp7pSeHtYN9Jo3Njld+lr4P0tJ8qtCrEjfDeJVO5uyLdaZWyHXTdRNGWjfn1TO6fKPkVnX0Xv0zO/EZ+nywsiIi+dBERAREQEREBERAREQFAXqgqKC6Mvtug8InbF2FZTNOnVEIJc3l8naMLnFu+hD3jpsET6LdFU0TlVeuNsxnihjr6O40VDkNnkeO1pK2BsrA9pBAfG8eK9p14rgCD3gFVf/Nr4T/8N8W/VEH8Kt11w613es8NfC+muGgPDaKZ9PM4DuDnsILgOviu2Op6dVhe4iYABmT35jR3DwiN39royf7V11bVW6rHjHp6QbEfj/Azh3id4prtZcHx+03OmJMNZR22KKWMkFp5XNaCNgkfMSrwqv7iaj0qv39dD9knuJqPSq/f10P2SfZ2+38pMRxWhFqvitb7rhvDy+Xq3ZTeDW0cHaRCeWEs3zAdR2Y8/nVs9xNR6VX7+uh+yT7O32/lJiOKzkBwII2D3ha3/wA2vhP/AMNsV/VEH8KsPuJqPSq/f10P2Se4mo9Kr9/XQ/ZJ9nb7fykxHFASexv4UzSOe/hxi73uJc5zrTASSe8k8qttzvlFjsdNb6eNstc9gZR2yn0HuaOgIH8lg6bceg+fQOF7hnSDlnyO+zs67b4WIt/pja0/0FSllxu247HI230jIHSkOllJL5ZSO4ve4lzz8riUxap2zOfl8/6NjzxyzSWqComqnsmuVbL4RVyx75S/lDQ1u+vK1rQ0fNvvJUuiLjVVNc60pvERFkEREBERAREQEREBERAREQEREBERBr32QJA4N5SXEgeCjev57flC2Ete+yB37zeU61vwUfda1923z9FsJAREQEREBERAREQEREBERAREQEREBERAREQEREBERBrz2QY3wZyrqG/9lHU9w8dq2GteeyD17zOVb6DwUeTf8tq2GgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICKEyPI3Wd9PS0lMK66VIc6GnL+zYGt1zPe/R5WjYHQEkkAA9dQft7mB/8PsY+TwyY/wCEvpo0euuNaMY75hcLuipHt7mH4hY/W5vs09vcw/ELH63N9mt9Fr4xzgw0P7Oz2SFZwXsEGPTYfJdrXkdI5kV3bXiJsUzHgvjMZidvTeQg7G+Y9OnXbvsbuNNZx/4ZxZjU4zJi1PU1UsVJTyVfhBniZodsHcjNAv7RutfyN769Kh7IPhNd/ZD8O58VvFNZ6IdvHU01dDUSukp5Wn7oAxje2lzSPM5XXE6S/wCE4xarBaLTYqa2WymjpKeIVc3RjGho2ey6npsnynZTotfGOcGGzEVI9vcw/ELH63N9mnt7mH4hY/W5vs06LXxjnBhd0VKbf8ujPM+12aZo72MrpWuPzExEf/3k71ZbHeoL/b21cDXx+M6OSGUakikadOY4dRsEeQkHoQSCCeVyxXbjM7u6cmEgiIuCCIiAiIgIiICIiAiIgIiICIiCkXfrxJi+S0nXyfDf
/oKUUXd/wlR/mj/GUovVn8tHgsiIiygiIgIsK83qgxy1VdzulZBb7dSRmWeqqZBHHEwd7nOPQBZjXB7Q4HYI2Cg+rC4dn4TKB5Bd36H/AKEJ/wCpKzVg8Ovv2U/nh37vAlXuq/482o3SuKIi8xkREQEREBERAREQEREBERAREQUi7/hKj/NH+MpRRd3/AAlR/mj/ABlKL1Z/LR4LLTHHusnvV/xnEbO7IZshrY6mtjprLfDZ4RBH2bXy1FQ1rn6DnsDWtadlx2NLUlv4jZDcuF3Cw5hkd3tePz3i62vIL5aqh7apz6eSeOjY+eJoeGvMenSNDS4tG9cxXS+ccLMY4jy2+a/251VUW8vNNUQVU1NLGHgB7RJE9ri1wA20nlOhsHS13xD9jvRPxW12jB8fstPBR3Ka4eBV1zuFAyN0rC1/YS0r+aHZPVgBYevigna4zE5yjUFJfM/iseD4jS1t5miyu/3uaCvu95nttwrKCn8akY6oMUj4DIw8+mMa5wj6cvM4myX61cS8Qx6y2m+ZFWWukuea2yjoZaG+SV9dBSTNe2eGSpfBGZAXeM3na4jfUnlBWxMG9j9HLw4mxriJJHkzXXR9ypIG1lVKLWNNEcVPVSP7fxdOIfzB3wjh0CuFu4L4darNbrVT2hzaO33SO9QCSrnkeKxn3MzpHPL3kf8AOSDobBUimRzjxgoquj4f+yIw6W93q5WeyWy33OgNfcpp54jNHIZInSucXvj5oQeR5I8YjuXVGFY5TYrjtLQUlZcK+ADtBPc6+WtmPN1++yuc4jzDegO5Y9Rw6xysrskq6i1x1E2R0sVFdRM9z2VUMbXsYwsJ5QA2R48UDfN13oL94NgNk4cWT2osFNNS2/tDKIp6uapIPK1vR0r3OA01oDQdDXQLURiRYVg8Ovv2U/nh37vAs5YPDr79lP54d+7wLdXuq/482o3SuKIi8xkREQEREBERAREQEREBERAREQUi7/hKj/NH+MpRfjKbLVurqe825kc1XTQvglpppOzbNE4td0d3Nc0t2NjR24HW+YQFJkV7rRKY8LvAEUjonF8tKzbmnRLeaYbG+5w6HyL1acXKKcTGyMbZiPNrGViRQnttfvQy6+tUX26e21+9DLr61RfbrWp+6Pij1MJtFQ8/4rt4XYtV5HlGP3G02alLBLUyT0j9Fzg1oDWzFxJJHcD5T3Aqbo8iu9xo4KqmxK5T008bZYpY6uiLXscNhwPb9QQQU1P3R8UephYUUJ7bX70MuvrVF9unttfvQy6+tUX26an7o+KPUwm1g8Ovv2U/nh37vAsRtyyCU8rMRronnudUVdK1g+cslcf6GlSXDpoZYi6dk1PdamR9XW0tSzs5YZHOcwjl2fFHZlrXAlrgzmaSDtc7sxRbqiZjM8JifI3QtSIi8xkREQEREBERAREQEREBERAXxzgwbcQ0ecr49wY0uO9AbOhs/wBCr1LSDMBDXXGma+zu8Hq6G31tI6OaOVji9s0rXnYdvs3Njc0OjdGHHT9BgfmSh93FP/3hTvZj88MsM1or6cB1V8KA18g2fg3MYT2bh4zZdPaNFqsqIgIiIORP8oXwjz7ixhdCLHcrNQ4hZWur6+GsqJm1FTOTyM0xsTmlrGk6JcDt7unQLcvsY8DzThhwgtWK5zcLbdLpa3Op6aqtk0srHUo0Yw50jGHmbtze7XK1vXzSvH3TuEWQxHlLp2RQMDiQHPfMxjR085cB+lbBQEREBRF8xuC789TC8W+8tp5Kamu8MLHVFM17mOcGlzSC0ujjJYdtdyDYOgpdEEKzIhSXF1HdY47a6aqFNb5XzNLa4mMyaYO8PAZJth6+ISNgqaX4lhjnaGyMbI0ODgHDY2CCD84IBHyhVttW/BaMMuVY+bHaSmLn3i4VHPNCe00GynlG2Bjh8K47Ajc6RxO3kLOiIgIiICIiAiIgIiICIvKqkMNNLICxpYwuBkOmjQ8p8gQQXIzK7qXPMM9ot07XQyU1YSZa
tjntkbIxh1yxkAcryfHBJa0xtJsSgsFY1uG2V4fbJnz0kdRLPZmBtHNLI0PklhH+w97nPB2SQ7ZJJ2p1AREQERQ2WZTSYhZ3V1S188jntgpqSAblqp3HUcMY8rnHz9ANucQ0EgKvn7nZRl2MYlTu5o2VMd8uhafvdNTv5oGnR6GSpbHoHo5sM3mWwVV8DxersdJWXC8Sx1OR3aUVVxlhJdExwaGsgi3oiKNoDW9BzHneQHSO3aEBERAREQF8IDgQRsHoQV9RBAOklxmvHaPkqLRWTySS1VXWMAoHu5OSNocAXRvfz68Zzmue1rRyECOfWPcLdS3egqaGupoa2iqY3Qz01RGJI5WOGnNc07DgQSCD0KjcQuU9ysrTV11Dca6nmlpame37EXaxvcxw5SSWuGvGbs6Ox1QTSIiAiIgIiICIoW8Ztj2P1QprnfLdb6kjm7GpqmMfrz8pO9LdNFVc4pjMrjKaXwgOBBGwe8FVf30sO9KbR67H9arPEuv4bcV8EvOJX/I7VNarpB2MoZXxte0ghzHtO/umva1w3020bBHRdej3uxPKV1Z4M7AOIWMsNHhsmUYs7KaN0tF7SWmrjjewQlwDGUxIe3ljYNtDdDR14oBWwV/OL2FPBeh4K+yJy+vyO92uS32ejNNabp4SwRVhmcPhIzvWxG1wcO9pfo/L3p76WHelNo9dj+tOj3uxPKTVngtKKre+lh3pTaPXY/rXx3FXDWNLjlNo0Bs6rIz/APKdHvdieUmrPBO3q80WPWqpuVxnFNRUzOeSQgnQ8gAAJcSdANAJJIABJCrGL2Wtv93bleQ0ppatocy02uQ7NugcAC6TRINRIPunDoxpEbSdSPlr2JZRZ+KV/pLvW3Si8GheZLLYX1DDNsf63NHvYl0fFj74mu24CRxbHtVcqqKqJxXGExgREWEEREBEWHdLxQWOl8JuNbT0FPzBna1MrY28x7hsnvPmViJqnEDMRVb30sO9KbR67H9ae+lh3pTaPXY/rXfo97sTylrVngm73fbbjVsmuV3uFJardBy9rV1s7YYY9uDW8z3EAbcQBs95AVG4ccT8SvtdXWyhzLFbtcqm41T6aks1ZCZXs2XdWA7e8NBLnAEHRO1j8Tbnw94p4BfsTu2T2d1DdqV1O93hkZMbu9jx43e1wa4fK0Lj3/J/cHLPwrzDKsszO62ujulDNJaLUyaqjHM0H4WpZs9WuHK1rh0IL06Pe7E8pNWeD+iKKre+lh3pTaPXY/rT30sO9KbR67H9adHvdieUmrPBaUVft3EDGLvVx0tFkNrqqqQ6ZDFVxue8+YAHZ/QrAuVdFVE4rjHimMCIiwjCvVY632euqmAF8EEkrQfO1pI/6Ko4lSR01gopAOaepiZPPM7q+aRzQXPcT1JJP6O7uCs+VfFi8fQ5v2Cq9jXxctX0SL9gL0LGy1PivUkkRFtBERAREQYtyttNdqOSmqoxJE/5dFpHUOaR1a4HRDh1BAI6rPwO6T3rC7HXVT+1qZ6OJ8smtc7uUbdrybPXXyryXjws/Bzjn0GL9lZu7bM90x5T6L1LSiIvOQREQFQ2lt0zi+S1A7V9udFS0wcNiJromSPLfMXF42e8hrR5Ar4qDbPjlmH0yD91hX26L+ue76w1HWm0RF2ZEREBERBj19vprpSSU1XC2eB405jx/aPMfMR1CyeH1xnumHWyoqZXTT8jo3yv+6eWOLOY/KeXa+LH4W/EW3fzpv756l3bYnxjylepa0RF5qIvKvixePoc37BVexr4uWr6JF+wFYcq+LF4+hzfsFV7Gvi5avokX7AXo2fcz4/RepnVDpGQSOhY2WYNJYxzuUOdroCdHXXy6K524W8esotnBW8ZnnlqiqKekrquClmt9aJqmtn9sJKeOmEPYxtZp3JG13MeYDmIb1XRq57h4BZdLgOS4FPcbLFYHV813sN2hMrqyGpNcKyJs8RaGcrXlzSWvJI10Ck56kWBvshJ8WqrzScQ8YOIVVBZ
Zb/F4LcG3COpponBsrWvDGala5zBya0ecacQvCn4332eopbRk+HTYdNkFrq6qyVMdzbVOe+KHtXRShrGmGUMPOAC4eK7xthRt54EZRxcuF7ruItZZqF0+O1GP0FJjzpZo4e3c10lS98rWEu3HHpgGgAdk96zbdwozrL8qxq45/X2JlJjVHVQ0jLCZnvrKieA07p5e0a0RgRl+mN5urz43QKf5CDxLjjeMa4YcFrZFa3ZXlGV2RkzZ7rdhSMkfFBE6Tmne15fK8yDTdEu04kjS6Et809TQU01TTGjqZImvlpy8P7J5AJZzDodHY2Oh0ufqjgtnzuCFh4e1FDguRU9vpJLdJJdfCWjs2NaylqI+VjiyZoDi4Dy65XhbswPH6vFMJsFlr7lJeK63UEFJPcJt89S9kYa6Q7JO3EE9ST16kq056xOrx4Wfg5xz6DF+yvZePCz8HOOfQYv2VbvuZ8Y8pXqWlERecgiIgKg2z45Zh9Mg/dYVflQbZ8csw+mQfusK+7Rf1+H/KGo3Sm1z/xDumX5L7JXGsMhiqKXFoLVJd5X22/SUEs4E0EZkkEcfM4Rl5aIecB/NzFw1yroBUSowO4S8dKHNBNTC1wY3UWd0Jc7tzNJVQytcBy8vJyxOBPNvZHTyjdUZZawsvHG541RZPV1GO19xu8+etxptqffRURxySU0JYYHviYI4tubuMjoXPdzHeldrLxivVwgzegq8SipcsxcU8ktsZd2OpqiKZhfHI2qexga3TZObmYNch7+irlRwIv81fXTistoZPxGp8vaDLJsUkcMMbmHxPvu43aH3OiPGX54kcAb5ml24j1dNXW1kOQCxvpKaqdI6KY0Mkj5Iapob0ik5mt8UuOiSR00cf5QKjm/sjqjPOCHFVlv7LHsmxylppDU4/fGV0PJM/xHw1UIaebxJGuboEa+VdTrm65+x4y/LYuI4u1Rjdnbl1hpbfFBZxK6OgnpnyGIeMxvaMcJNl+mEaADDra6CsBuhslCb22kbeOxb4WKBznU4l143Zl4DuXe9bG1qnPWM9Y/C34i27+dN/fPWQsfhb8Rbd/Om/vnrV33E+MeVS9S1oiLzUReVfFi8fQ5v2Cq9jXxctX0SL9gK03mjdcbRXUjCA+eCSIE+QuaR/8AKqGJVkdRYaOEHkqaaFkFRA7o+GRrQHMcD1BB/pGiOhC9CxttTHevUmERFtBERAREQF48LPwc459Bi/ZXnc7pTWikfU1Uojjb0A73Pcega1o6ucSQA0bJJAHUqQwS1z2TDLJQ1TOzqYKOJkse98j+Ubbvy6PTfyLN3ZZnvmPKfVepOoiLzkEREBUG2fHLMPpkH7rCr8qGOW1Zxe4qg9k65OiqqYvOhKGxMjcGnuJaWDY79OB7ivt0X9cd31hqOtMoiLsyIiICIiAsfhb8Rbd/Om/vnpcLjTWqlfU1czYIWd7neU+QAd5J8gHU+RZXD+3T2rDrZT1MboZ+R0j4nd7C9xfyn5RzaUu7LE+MeUr1LCiIvNQULeMKx/IagVF0sdtuM4HKJaqkjkeB5tuBOlNItU11UTmmcSblW96vDPROyfq+L+FPerwz0Tsn6vi/hVpRdukXu3POVzPFVverwz0Tsn6vi/hT3q8M9E7J+r4v4VaUTpF7tzzkzPFVverwz0Tsn6vi/hT3q8M9E7J+r4v4VaUTpF7tzzkzPFB2rBscsVS2pt1gtlBUN3yzU1JHG9u+/RA2NqcRFyqrqrnNU5TeIiLAIiICxLnaaG9UpprhR09dTkh3ZVMTZGbHcdEEbWWisTMTmBVverwz0Tsn6vi/hT3q8M9E7J+r4v4VaUXfpF7tzzlczxVb3q8M9E7J+r4v4VRuE/DvF7jS5SavHrVVmHIrhBEZqOJ/Zxtl01jeh00DoB5PMtxLX3Bsk0mX7O/9J7l5/wDe/KnSL3bnnJmeKY96vDPROyfq+L+FPerwz0Tsn6vi/hVpROkXu3POTM8UBbcAxizVbKqgx21UVUw7
ZNBRRse35nAbCn0Rcqq6q5zXOTORERYQREQEREBERAREQEREBERAREQEREBERAWveDQIpMv23k/0nuXn6/C9/VbCWvODLeWky/xS3eUXI9fL8L3oNhoiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgLXvBsAUmX6AH+k9y7t/wC9+VS/FS5ZNZeHd/uGG01FW5NSUrp6KluEb5IZnN8YsLWOa4lzQ4DTh4xHzLkf2Bfsg+JfGjNMmpLhaLDQ4tT1E9zuVTT0k7Z/Cp3Ethjc6YtaNhzurSeVhG9kFB3GiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgh8pyu34fbDW3B7uUu5IoYm80kzz3NYPKehPXQABJIAJWnrxxdym7SO8CdS2Gn/ksjjFRP8A+Z7vF/QGdPOe9QWTZLJmeRVV1c/npWudT0LN7ayAO1zD5Xkc5PlHKOvKFHr91oPsu1Zoiq9TrVTx3R3YJnGxLnO8xJ+NdWPkFJSfYp7usy9LKz1Sk+xUQi9Xo2j/AOqn4Y9E1pS/u6zL0srPVKT7FVjBbdV8NILpDjN1mtEdzrZLjVthpaU9pO/XM7xojodOjRpo8gCkETo2j/6qfhj0NaUv7usy9LKz1Sk+xT3dZl6WVnqlJ9iqde8to7De7Ba6iOd9ReqiSnp3RNBY1zInSkvJIIHKwjoD1186mlI0fRpmYi3Ts/bHoa0pyDiJmVM9rxkTqnl/kVVFAWO+fkYw/wBBCv8AhPGFt3rYLZfaaO31sxDIaqBxNPO89zevWNx7gCSCdAO2QFqRfiaFlRC+KVgfG8FrmuHQg94Xz3/Z2jX6dXUimeMRjy3meLqlFROD+VT5FjktLXTGe4WyXwaSV526VhAdG8+clp5SfK5jj5Ve1+Av2arFyq1XvhRERcAREQEREBERAREQEREBeFc2R9DUNh++mNwZ8+ui90VicTkcl2FzX2O3ubvlNPHrZ2fuQs5TOaYrJhuR1NNyctuqpXz0Mg+5LXHmdF8hYSdD/Z5T59U3I7HcLz4P4BkNdYez5ufwKGnk7Xetc3bRP1rR1y6+6O99Nf1Km9Tdoi7b2xKVb0ytYeyKuNdbuHsfgtUKGknuVJT3GreHlkNI+QCRz+RzXcncHcrmnlJ6hTvuIyDX4Qr58/gdv/8ArKRs2L11Eall1yOuyOlnjMZpbhTUrYwD3n4KFhOx00SR17lzua12iaMTGevZ6o0FlGFDFeHnEGrt2Q2KWjNj5ZbRj0MkUTJC8GOoc11RLyuIa9uxrm+XSseU2v3tMzrDh1KaSrq8PuNU6GMuf4RUwuiMUrgSeeTb3DmOyd9StwUeC43b7ZVW2lx61U1uqxqopIaKNkM389gbp36QpJ1so318Vc6kgdWxRuhjqTG0yMY4guaHa2AS1pIHQ6HmXzRomI2bJ2fxtnOOY51xux4fRZPwcudgqIK26V808tbWCqMtRUuNDKXvl24ku5z12PFJI6dy6TVcPDzHqZ81TbbNbbRdH8747lR0EAnhlc1ze1aSwjm049SDvZB2CQsFmE5A17SeIN8eAdlpo7fo/J0pl1s26rETGrv4Y4RHXPcLiipowjIAQffCvh+TwO3/AP1lcJZBEwuds9wAa0uc4noAAOpJOgAOpJX1U1TVvjHL6SNicB+Y5BlBG+zFPRA9enNzVHd8utb+cLcipvCvEZsUxsmtYGXOvlNVUsBB7MkBrY9j/Za1oOunNzEd6uS/nntG7Tf0quujdsjlER9G5ERF5qCIiAiIgIiICIiAiIgIiII++2GgyW3SUNxp21FO8g6PQtcO5zSOrXDyEdVqW78EL1RyH2muVLcKf+THcy6GVo+WRjXB3/4NW6kX3aNpt/RdlurZw6ly0AeFGZD/AFG2n/37vs096jMvxG2+vu+zW/0Xo/fWk8I5f2bODQHvUZl+I231932ae9RmX4jbfX3fZrf6J99aTwjlPqbODQHvUZl+I231932ae9RmX4jbfX3fZrf6
J99aTwjlPqbODQsPCHMJ3Bro7TStPe99ZI8j5miLr/SFfsK4TUWL1TLhXVLrvdWD4OV7Ozig30Jjj2dHXTmcXO7wCASDfEXzX/amk36dSZxE8P8AuTPAREXko//Z", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from IPython.display import display, Image\n", - "\n", - "display(Image(graph.get_graph().draw_mermaid_png()))" - ] - }, - { - "cell_type": "markdown", - "id": "521adaef-dd2f-46d6-8f6a-5cc1d6e0aefc", - "metadata": {}, - "source": [ - "## Stream outputs from the final node" - ] - }, - { - "cell_type": "markdown", - "id": "5cfaeb64-5506-4546-96c0-4891e6288ad9", - "metadata": {}, - "source": [ - "### Filter on event metadata" - ] - }, - { - "cell_type": "markdown", - "id": "f218a05d-1590-4d5c-b0b7-97d94c744efb", - "metadata": {}, - "source": [ - "First option to get the LLM events from within a specific node (`final` node in our case) is to filter on the `langgraph_node` field in the event metadata. This will be sufficient in case you need to stream events from ALL LLM calls inside the node. This means that if you have multiple different LLMs invoked inside the node, this filter will include events from all of them." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "68ac2c7f", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Well| folks|,| let| me| tell| you|,| the| weather| in| San| Francisco| is| always| sunny|!| That|'s| right|,| you| can| expect| clear| skies| and| plenty| of| sunshine| when| you|'re| in| the| City| by| the| Bay|.| So| grab| your| sunglasses| and| get| ready| to| enjoy| some| beautiful| weather| in| San| Francisco|!|" - ] - } - ], - "source": [ - "from langchain_core.messages import HumanMessage\n", - "\n", - "inputs = {\"messages\": [HumanMessage(content=\"what is the weather in sf\")]}\n", - "for msg, metadata in graph.stream(inputs, stream_mode=\"messages\"):\n", - " if (\n", - " msg.content\n", - " and not isinstance(msg, HumanMessage)\n", - " and metadata[\"langgraph_node\"] == \"final\"\n", - " ):\n", - " print(msg.content, end=\"|\", flush=True)" - ] - }, - { - "cell_type": "markdown", - "id": "b0bb447a-6650-4166-b124-2d5b99a1f88b", - "metadata": {}, - "source": [ - "### Filter on custom tags" - ] - }, - { - "cell_type": "markdown", - "id": "ea4db927-44b6-46ab-8b8d-f237edaf1438", - "metadata": {}, - "source": [ - "Alternatively, you can add configuration with custom tags to your LLM, like we did in the beginning, by adding `final_model.with_config(tags=[\"final_node\"])`. This will allow us to more precisely filter the events to keep the ones only from this model." 
- ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "55d60dfa-96e3-442f-9924-0c99f46baed8", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Looks| like| we|'ve| got| some| clouds| roll|in|'| in| over| the| Big| Apple| today|,| folks|!| Keep| an| eye| out| for| some| over|cast| skies| in| NYC|.|" - ] - } - ], - "source": [ - "inputs = {\"messages\": [HumanMessage(content=\"what's the weather in nyc?\")]}\n", - "async for event in graph.astream_events(inputs, version=\"v2\"):\n", - " kind = event[\"event\"]\n", - " tags = event.get(\"tags\", [])\n", - " # filter on the custom tag\n", - " if kind == \"on_chat_model_stream\" and \"final_node\" in event.get(\"tags\", []):\n", - " data = event[\"data\"]\n", - " if data[\"chunk\"].content:\n", - " # Empty content in the context of OpenAI or Anthropic usually means\n", - " # that the model is asking for a tool to be invoked.\n", - " # So we only print non-empty content\n", - " print(data[\"chunk\"].content, end=\"|\", flush=True)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/how-tos/streaming-specific-nodes.ipynb b/docs/docs/how-tos/streaming-specific-nodes.ipynb new file mode 100644 index 0000000000..fd9d8c6ee8 --- /dev/null +++ b/docs/docs/how-tos/streaming-specific-nodes.ipynb @@ -0,0 +1,211 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "c5889ca0-6feb-4864-a630-e97ccc2c587e", + "metadata": {}, + "source": [ + "# How to stream LLM tokens from specific nodes\n", + "\n", + "!!! 
info \"Prerequisites\"\n", + "\n", + " This guide assumes familiarity with the following:\n", + " \n", + " - [Streaming](../../concepts/streaming/)\n", + " - [Chat Models](https://python.langchain.com/docs/concepts/chat_models/)\n", + "\n", + "A common use case when [streaming LLM tokens](../streaming-tokens) is to only stream them from specific nodes. To do so, you can use `stream_mode=\"messages\"` and filter the outputs by the `langgraph_node` field in the streamed metadata:\n", + "\n", + "```python\n", + "from langgraph.graph import StateGraph\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "model = ChatOpenAI()\n", + "\n", + "def node_a(state: State):\n", + " model.invoke(...)\n", + " ...\n", + "\n", + "def node_b(state: State):\n", + " model.invoke(...)\n", + " ...\n", + "\n", + "graph = (\n", + " StateGraph(State)\n", + " .add_node(node_a)\n", + " .add_node(node_b)\n", + " ...\n", + " .compile()\n", + " \n", + "for msg, metadata in graph.stream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"messages\"\n", + "):\n", + " # stream from 'node_a'\n", + " # highlight-next-line\n", + " if metadata[\"langgraph_node\"] == \"node_a\":\n", + " print(msg)\n", + "```\n", + "\n", + "!!! 
note \"Streaming from a specific LLM invocation\"\n", + "\n", + " If you need to instead filter streamed LLM tokens to a specific LLM invocation, check out [this guide](../streaming-tokens#filter-to-specific-llm-invocation)" + ] + }, + { + "cell_type": "markdown", + "id": "dcff85bd-8a5d-409e-93d4-e9242b5e976d", + "metadata": {}, + "source": [ + "## Setup\n", + "\n", + "First we need to install the packages required" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "05157237-783c-49de-9f29-7dca3c285647", + "metadata": {}, + "outputs": [], + "source": [ + "%%capture --no-stderr\n", + "%pip install --quiet -U langgraph langchain_openai" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "efd22cd2-3152-433b-ad50-65be8ace61d4", + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "\n", + "def _set_env(var: str):\n", + " if not os.environ.get(var):\n", + " os.environ[var] = getpass.getpass(f\"{var}: \")\n", + "\n", + "\n", + "_set_env(\"OPENAI_API_KEY\")" + ] + }, + { + "cell_type": "markdown", + "id": "a0ce8c26-f38d-4bdb-89ff-b058e7560019", + "metadata": {}, + "source": [ + "## Example" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "419a3c71-7bf6-4656-99b8-b5d61f3f4bf1", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import TypedDict\n", + "from langgraph.graph import START, StateGraph, MessagesState\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "model = ChatOpenAI(model=\"gpt-4o-mini\")\n", + "\n", + "\n", + "class State(TypedDict):\n", + " topic: str\n", + " joke: str\n", + " poem: str\n", + "\n", + "\n", + "def write_joke(state: State):\n", + " topic = state[\"topic\"]\n", + " joke_response = model.invoke(\n", + " [{\"role\": \"user\", \"content\": f\"Write a joke about {topic}\"}]\n", + " )\n", + " return {\"joke\": joke_response.content}\n", + "\n", + "\n", + "def write_poem(state: State):\n", + " topic = state[\"topic\"]\n", + " 
poem_response = model.invoke(\n", + " [{\"role\": \"user\", \"content\": f\"Write a short poem about {topic}\"}]\n", + " )\n", + " return {\"poem\": poem_response.content}\n", + "\n", + "\n", + "graph = (\n", + " StateGraph(State)\n", + " .add_node(write_joke)\n", + " .add_node(write_poem)\n", + " # write both the joke and the poem concurrently\n", + " .add_edge(START, \"write_joke\")\n", + " .add_edge(START, \"write_poem\")\n", + " .compile()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "fed84d5e-ba10-4324-a664-dca263951a33", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "In| shadows| soft|,| they| quietly| creep|,| \n", + "|Wh|isk|ered| wonders|,| in| dreams| they| leap|.| \n", + "|With| eyes| like| lantern|s|,| bright| and| wide|,| \n", + "|Myst|eries| linger| where| they| reside|.| \n", + "\n", + "|P|aws| that| pat|ter| on| silent| floors|,| \n", + "|Cur|led| in| sun|be|ams|,| they| seek| out| more|.| \n", + "|A| flick| of| a| tail|,| a| leap|,| a| p|ounce|,| \n", + "|In| their| playful| world|,| we| can't| help| but| bounce|.| \n", + "\n", + "|Guard|ians| of| secrets|,| with| gentle| grace|,| \n", + "|Each| little| me|ow|,| a| warm| embrace|.| \n", + "|Oh|,| the| joy| that| they| bring|,| so| pure| and| true|,| \n", + "|In| the| heart| of| a| cat|,| there's| magic| anew|.| |" + ] + } + ], + "source": [ + "for msg, metadata in graph.stream(\n", + " {\"topic\": \"cats\"},\n", + " # highlight-next-line\n", + " stream_mode=\"messages\",\n", + "):\n", + " # highlight-next-line\n", + " if msg.content and metadata[\"langgraph_node\"] == \"write_poem\":\n", + " print(msg.content, end=\"|\", flush=True)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + 
"name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index 7a06730bce..21915daca3 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -17,16 +17,16 @@ "When building LLM applications with LangGraph, you might want to stream individual LLM tokens from the LLM calls inside LangGraph nodes. You can do so via `graph.stream(..., stream_mode=\"messages\")`:\n", "\n", "```python\n", - "from langgraph.graph import StateGraph, MessagesState\n", + "from langgraph.graph import StateGraph\n", "from langchain_openai import ChatOpenAI\n", "\n", "model = ChatOpenAI()\n", - "def call_model(state: MessagesState):\n", - " model.invoke(state[\"messages\"])\n", + "def call_model(state: State):\n", + " model.invoke(...)\n", " ...\n", "\n", "graph = (\n", - " StateGraph(MessagesState)\n", + " StateGraph(State)\n", " .add_node(call_model)\n", " ...\n", " .compile()\n", @@ -81,18 +81,10 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "a372be6f", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "OPENAI_API_KEY: ········\n" - ] - } - ], + "outputs": [], "source": [ "import getpass\n", "import os\n", diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 571ca35237..a4739d22c0 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -64,6 +64,7 @@ plugins: 'how-tos/streaming-content.md': 'how-tos/streaming.md#stream_modecustom' 'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple-streaming-modes' 'how-tos/streaming-tokens-without-langchain.md': 'how-tos/streaming-tokens.md#example-without-langchain' + 'how-tos/streaming-from-final-node.md': 'how-tos/streaming-specific-nodes.md' # cloud redirects 'cloud/index.md': 'concepts/index.md#langgraph-platform' 
'cloud/how-tos/index.md': 'how-tos/index.md#langgraph-platform' @@ -147,9 +148,9 @@ nav: - Streaming: how-tos#streaming - how-tos/streaming.ipynb - how-tos/streaming-tokens.ipynb + - how-tos/streaming-specific-nodes.ipynb - how-tos/streaming-events-from-within-tools.ipynb - how-tos/streaming-events-from-within-tools-without-langchain.ipynb - - how-tos/streaming-from-final-node.ipynb - how-tos/streaming-subgraphs.ipynb - how-tos/disable-streaming.ipynb - Tool calling: From 55c9498bebd34968edc13fad92065020a005f51f Mon Sep 17 00:00:00 2001 From: vbarda Date: Wed, 29 Jan 2025 22:08:10 -0500 Subject: [PATCH 07/14] redirect --- docs/docs/how-tos/streaming-specific-nodes.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 docs/docs/how-tos/streaming-specific-nodes.md diff --git a/docs/docs/how-tos/streaming-specific-nodes.md b/docs/docs/how-tos/streaming-specific-nodes.md new file mode 100644 index 0000000000..57ba5d7276 --- /dev/null +++ b/docs/docs/how-tos/streaming-specific-nodes.md @@ -0,0 +1,2 @@ +WARNING: DO NOT MODIFY/DELETE +This is a dummy file needed for mkdocs-redirects, as it is expecting redirects to be markdown files From 84fb8ad57d3e30b769c43b5d2038e0d471f33f1a Mon Sep 17 00:00:00 2001 From: vbarda Date: Thu, 30 Jan 2025 13:09:34 -0500 Subject: [PATCH 08/14] update streaming from tools --- docs/_scripts/notebook_convert.py | 2 + ...9-4147-42e3-89fd-d942b2b49f6c.msgpack.zlib | 1 - ...4-62df-4855-8219-d5e1a1a09be9.msgpack.zlib | 1 + ...c-53b9-4c53-87b2-d7263cda3b7b.msgpack.zlib | 1 + docs/docs/how-tos/index.md | 1 - ...-from-within-tools-without-langchain.ipynb | 372 --------------- .../streaming-events-from-within-tools.ipynb | 447 +++++++++++++----- .../streaming-events-from-within-tools.md | 2 + docs/docs/how-tos/streaming-subgraphs.ipynb | 2 +- docs/docs/how-tos/streaming-tokens.ipynb | 11 +- docs/docs/how-tos/streaming-tokens.md | 2 + docs/mkdocs.yml | 2 +- 12 files changed, 356 insertions(+), 488 deletions(-) delete mode 100644 
docs/cassettes/streaming-events-from-within-tools-without-langchain_45c96a79-4147-42e3-89fd-d942b2b49f6c.msgpack.zlib create mode 100644 docs/cassettes/streaming-events-from-within-tools_2c30c7b4-62df-4855-8219-d5e1a1a09be9.msgpack.zlib create mode 100644 docs/cassettes/streaming-events-from-within-tools_8ae5051c-53b9-4c53-87b2-d7263cda3b7b.msgpack.zlib delete mode 100644 docs/docs/how-tos/streaming-events-from-within-tools-without-langchain.ipynb create mode 100644 docs/docs/how-tos/streaming-events-from-within-tools.md create mode 100644 docs/docs/how-tos/streaming-tokens.md diff --git a/docs/_scripts/notebook_convert.py b/docs/_scripts/notebook_convert.py index 6523372dcf..8a47c44cc1 100644 --- a/docs/_scripts/notebook_convert.py +++ b/docs/_scripts/notebook_convert.py @@ -22,6 +22,8 @@ def preprocess_cell(self, cell, resources, cell_index): ) elif cell.cell_type == "code": + # Remove noqa comments + cell.source = re.sub(r'#\s*noqa.*$', '', cell.source, flags=re.MULTILINE) # escape ``` in code cell.source = cell.source.replace("```", r"\`\`\`") # escape ``` in output diff --git a/docs/cassettes/streaming-events-from-within-tools-without-langchain_45c96a79-4147-42e3-89fd-d942b2b49f6c.msgpack.zlib b/docs/cassettes/streaming-events-from-within-tools-without-langchain_45c96a79-4147-42e3-89fd-d942b2b49f6c.msgpack.zlib deleted file mode 100644 index 108c689c48..0000000000 --- a/docs/cassettes/streaming-events-from-within-tools-without-langchain_45c96a79-4147-42e3-89fd-d942b2b49f6c.msgpack.zlib +++ /dev/null @@ -1 +0,0 @@ 
-eNrtGUtv48Y52566pwZo7wRRoEAhyqSot+GDLTu2d2PLtrT2rhNDGA2HIm1yhuYMZckLHbLpvWDQP9CsI6eGs0mQoE3Spuceit69h/yI/oJ8Q0lrOfbGm2SDJpAFQRQ13/vNT4+O2yTkLqO3Tl0qSIiwgBv+zqPjkOxHhIs/9n0iHGYdrVVr9cdR6J79wREi4OWpKRS4aRYQitw0Zv5U25jCDhJT8D3wSELmqMms7tNbrYeqTzhHLcLVsvLGQzVkHoFvasRJqKYUFTPgTYX86QBI/J4rLlWEQ5QmsULGfLW3A1A+s4gnYVqB0Mx0ThNR2GQSn4uQIB+ORBgRuBeMeUNWohskrOyIJqpJ8Gffy8pDlSI/AWgR0XAF8bmEsAjHoRsMgdR7nIA4LlckYfhQPMb2lChQDhwXO0qCpqCQjMRuuW1ClcBDmKQluQCFwAXMyxOWI5lYc5dgkQCEYMhQuGQAkGBeAAUNXdpSez0Alp5xQ2JJBYegO71eb6d37BBkAZM/HTmMi/jJRQd9iDAmYDlCMbOAWPxB69ANUopFbA8JcgJOoCSxS3yyR0igIQ/U6A+w4o9QEHguRvJ8apczejp0miZFvHx8In2rgcupiD+f5V2KqyDJ7PLUWhfCiSpGOltM6x91NC6QSz0ID81DIFQ/SM7/MX4QILwHlLRhqMb9AfKTcRjG4/dWEK7WLpBEIXbi91Do57OfjP8eRlS4PomPK2uX2Q0Pz9mZacNIlz6+QFhqFH+QXMrJp8v+foEIEWFXwwxoxX/Rn4yM5RHaEk782MyV3g8JDyBJyNt9QBMRf3QEjiH/+ffxMFnerd4defSrV357NA9Oir+sO1FKyeSVGgmUjJ7JKoZZzsq3srhSP60M2dSlT84UQTpiirTlL4MUmVYgRUNOxEwkbK34cT1ElNvgqIVRUBxjJ6J7xDqpXBkOX8pwAPWkPpDFGukEjBNtKGZ8el/bGNQNbXn+k0HsaSxsIeoeJrER/1V6GYRw6afDY4h9SRKYaz6PH2eymSfDk5EDTkBRXTN0TTc+l4mAIeik4AELQTGCoSqJbnyW8lFHRtyMaeTMvK7r05CP2IssUoua88wHnnxaCULiMWR90dFCMKjn+i54JfkcVjwIJAOQ9c8uQwi2RyiP38/pg9e/xkFCIjlINZ4ROirB659XA41oZSVMqVD44iIY+OiczuO8zz+7fD4k8a7OTzsjYM214rPfwU2jVLJLmWyuqBvZnNksZQyUMzMoj4hRKOaJaX9YeU2rIOwQrZaEX3w8/2B1dmW5clID2hWocC555+mtXzYa2G40/Zkt74Fr8s1Wox7ou5m2u7gxZ/Olzfn6HO8Yr7f9ppPLpdOr9vZ9rBmFTMHMFTMZUzPSetpIG1recMPD0kp1gRv39l+fx0v59U26udVebdJNI4PTa8JaQAW2d2fznrUdLBp364vtdQd7peDQQ12y7S1FS3ZtLXKKfrViNxcXzUYJr4M/kXBmpqYViEQoinxmmCAaJIgm0yNbNkbpMa1YSRTMpC9WxmllCVpalXrdacgrCCcCVyjYNSjsM6uMkrM/gw2itmvN4NqcjvT9/XzVt+nC3kaHNnfX1zfq1BGdg+2D2vycMGsru41wdn3MCLpZ1PSRHfRsMQmec9G/p1R/u6+N57tWTfoV+JEyTl3b7tdICCkUn2CPRRYU+ZD0wecbsw/iT4u4ZCIb3k0L42zW1Ba2NpJu/VZ/0Gqe/uqRhQSSPciFTqPK1o6hsWuzc7vZ9fVwkdYP767o+VVat7fszYM7lfbiAVJTo6Y2wEifDwPppLAAAIZCJGT3emafTGrU2y+2dsj3TA4weJdDh23YIBYJA5AOyNPI84CWw1wsmyZ0epdapKOW9RR0b08gtTyaM1QE1QVKLKClzkeNAQHZ0BsYed43aQyUhoMGn6XNzY3tpVX3TnHX3pqbe5AtvLa87wKxQXcemy3GRovRZDE+WKgobEU+sAduKjTznRQ0czviyBv
I00upHmtBQWzykYCgtMudBtiMS6oJ1E7v9u2fv3Oea/pxG44b7OGbic1ujPRtRhrMgzdmusZMb6rlm2i63kzw/HVjpOuMNHhCvbHSdRnXmzQjXa/ruBnHdH5jvrq6sHP79svcwvyi/8O3MCnlHGtsqLqAqg73L6PgkJykB5XrpqkX2cuMBZQi54Gk2yW1fChjEmVSgm+ufWTkKWNh23hBmUbqSkR5f6VYY+pzhvd4SuEOI3CxIi6UZkSp3OzcLLG+yxLrq1d+fbPG+omtsfo42QrEZ//7iS8FfoTH9UsrvHyh+N1WeL+ZlBWeYRoTuMLLl176Ci/fzJQyum6TbMFCBdPCmWJWz+tG0SgWs6ade+4K7yWshggq4uzVq6FX//v80WwDVxe3816xUqwuNfWVlVknbK9XrCj//UYz8/+xGoIZ5kfYy/xcLHNuhmUYmyZTczkATazu5+P+ROqfmljHd1k0sbo7qD25GZ88tN7k+8S5XS4pbtw+aW5H1JpY3eVCbnIHu0nVPKIT+yTjTm6FT0+U4i/wZxMXLLjqb6avAWQIP1w= \ No newline at end of file diff --git a/docs/cassettes/streaming-events-from-within-tools_2c30c7b4-62df-4855-8219-d5e1a1a09be9.msgpack.zlib b/docs/cassettes/streaming-events-from-within-tools_2c30c7b4-62df-4855-8219-d5e1a1a09be9.msgpack.zlib new file mode 100644 index 0000000000..1843d8e654 --- /dev/null +++ b/docs/cassettes/streaming-events-from-within-tools_2c30c7b4-62df-4855-8219-d5e1a1a09be9.msgpack.zlib @@ -0,0 +1 @@ +eNrtnU9v3MYVwGv05kOPPbOLAGkNccX9L62gw1qSLVmWZXslR0piLGbJx+V4SQ49HK60Mnyo23vBol+gsSM1guMkcNAmadNbgR76BexDvkG/Qx9Xq0iCA80iK6kG8nSQVktyyPnNmzdv3rx5fLLXAxlzEV56zkMFktkK/4nTJ3sSHiYQq9/vBqA84Ty7vdpce5pI/uqKp1QU1ycnWcTzIoKQ8bwtgsleYdL2mJrEz5EPg2KetYXTf33pF49yAcQx60CcqxsfPMpJ4QN+yiUxyNyEkbMF3jtU2VdzLDT6IjEU+L4RgLGFRRpdHjqGcA2uIIgNbgS84ynDzb7loaE8MFzh+2KLhx0j8pkNdePdNjhSiODdvHGTx8rAUnxg+KFkxIntDYuKIWKSqew6LCUw2n2DGViBgOWNxqB0208cwC/bkoNrOBDbkkdZ5bLnATYsKZ97fB/rEQgH/KwWnUiZZWEGPORZ/WIlgQV4QMkEHu95wByk/odnnohV+uIkx8+YbQNeDqEtHHyw9NPODo8m8NauzxTsI6sQBq2U7ncBIpP5vAe7B1eln7Mo8rnNsuOTD2IRPh+yNVU/gjcP72dNYGLLhCr9uhH3Q3sVn6SxNHm7j60eGoV8pZwvfb5txorx0MdWNH2GD7UbDY7//fiBiNldLMkcSlS6e3Dxi+PniDj9eIXZq80TRTJpe+nHTAbV8svj38skVDyAdG/u9pu3Gx48ul0pXyjkp784UXBWo/TTwZ/64DcXfztRCCjZN22BZaV/tl4cwvIh7CgvfVqsWX+REEcoy/C7XbxMJfGTZ9gw8J9/7w1l+qPV5cMW/e5nv3w2j42UfrvmJRNGyTJuoDgXrWLFKEzVK1bdsozrK2vP54a3Wcva5BXK+raahF72zYGkzBjYk2QMajZRrjn1xZpkYexiQy0cCsWe7SVhF5z9uR8Uh28zccDqZfXBzmbCdiRiMIePmT7fMO8edG9zaf7lgeyZQnZYyHcGspF+krUyPgQPvx
wejqTIisSbm0GcPp2yrBfDI4cNsI8VtcyCZVqFb7ZN7Fbg84AjzMHvoT7B9i9Z+PPVm2co0QVUPZ8UKtbBzz+PnyMhwKfJbn9UUnEaf/7xw2d9X1o5O2m6VPvm5HlI96ikp8Ug/urN48MyPrLi59uHJ5vcSV+9g/+0gFn2tAsFlxWhWrGdsmW7UxYwZtUK03a19jU2JrexlKz1IiGxdcFGDar66auJgG1n3W62VKiUqljXmUNd00za8yKrRDxjRBJ8wZzPbNe0UdWAeSCB6d785q3GytLcfhMfck6ILoc/vr7081bLdlvtYHZttXFnaWO5y27aJZevb7WW8+sbd/qbc+5WaaW0+F5oNRbuRe31yOuahVppqljNmsQs5K18IV8wweq9X72zsHh1feX20sY1pZbCUsIKttVbDK6WCg+u32t0nZ5bndsJa1Vxrb+6FXduvn9vsSmChirI9WWxcHdnZVMK5t29PrW5HFar3lYDa8OUNzs5Y6AwcuQ7O+wjJvYRM+sh0/XiYQ+ZMZwBg9n8SeU4Yyzi4LMa+v0Zo5nBBPzLAmiiEp69JUJ49SdkkPS4M+vOte4E6xs3Fkq3mgXnejPpNb319fV2ZVmVWwv3FjaDKSj2ytu3y9YxCOVyxbSGHKpWeWogh0eP/iOf6q8b5vEub65GB6PsXijikLvubhMk9qJ03/ZF4qCel7A7d82829hMv5y2WIlVikW7Xa1VWcUyF967u8d8FKaenb70SrO5erlcys0YAZudqpYtazDo/nY3E76w8/pf/3WYYnXjUY47uXouG6FtHJ/NRm9N9XhYY+vTfH4nqT501uza9g1wvLUIx+ScaD9AtTK8In80pucHigdPsFFRKcAyD+BlXXHicAA8Pv6ZqBXKplUzC1N4FY42PW5DS3Ec+es5HNRY4qvsQD/GcbSFY3oHZISPnt3bjVq1Ijg11q6U7eyensCL0YhAGwIHf9jO1a0JLMRXLFc/NCtyDLUUdpQwK/Z7yyKH/0hwk5jh84WJ7z+eyPmig1qtHR98MZHDm/PYa2HFcGQcnnX/8eXLPx2CR7iaKMU5YnQqo18RoNMBGYtAUqSFxIiRlpHyJBAlHaVs5ooUCNPpmAZzf6KkodQXCTHSMBo4ooiShlLmqCNIOqVEeltrKBEhDaGh05s4nc6p/mH4IXU3DaQCATodUJ4AadTRlSuE6HREV4EsIw0iEiLtcEaANIpozeM04dfOQAiR1g3Zj7jNfL9PpLQOW3LXav0iAmWJKGkoRSIrgSidTkm4hIhUEjmQLobTBAHSLkWGWRQOlkaoSHWT5//cVyKZUhJimsLpQMVJlMXikt9NbwvQLJf00hkYlMRI5weQLKCpCa22jSlGS+QlGSFAEshEolCksRH1mOQiIVHSWtt8hzqcltKvidDphLIt28RINxmhjqZdAtiisY182+NK0cMEgMSIxGhcv78kRBpEXVo/0kL6DQHSGUa0/UjLKIkTimsbyTcyyNJCpvYIjn+HtLeek8+75PvXupE8AEV9jkzucWNJeZYmkASJBIlMyvO3AHwWdkltj7IPgDwBI6zcIiZa4dZxchJJVvcIBqUPEBElTcgNpUnQQyoSIArcojQJ54voFuXZGiFIQtGshLJJUDaJc9fXDSKkM68D5tPOdm00Emv75PunMInxnSOszUMgA0nr/s/eDUQmkn7pNuYOKSbKJ0Hb/2ix7a1IkKAo3naECC7qa3qdLQiRBpEnfJIj2vg/dlwbC2iBjYb+cfuZzyRlI9PO/31hd4kSdTbytJ23BwkkUqCsrfSOLQr+vzAfkhBdEiVS3mMj6vgsjmnrFgVsjStIFI40ajgSdTatRSlFj1bbRkoo3YOQA70LQB99o4RkHRIp2sR1FumSpctsMppoE9cZMAozu0lxSnarJQU4UQkVZz51PNqkND6kEgGiOS9tUjpfRPME6HRA2ftJgKwk2qFEO5Roh9L/PRMQ2DQNoVeTjT/vT2TIVSJJmHSktr
iiJO7a97hhqTyibYH6RDeSbYEk9whtCyDn7YWtKlFqqZGCcZVHCyYUjjv28qSN58XY6UiYSJhorfsCZruot8kOoBB4CoGnhbe3R4zm6bXuekrkCRglSxDFSYzwXld6Bw69H/hM3ntLkGhOcgZetz4tBpAkURali8p+w3rU3Sj7zdgLSj6jfUoj7i0hTjrNTWpbn9eNkk3RGsm4AUoetylCidxIZ5H2lhBRXBLFJV1IICDYQjLFad5GK5KUSYkcAG9JHDeXkgSJVv9/DCA9k1w8mPAeUflgfvXWwv3Ll/8HjuzTXg== \ No newline at end of file diff --git a/docs/cassettes/streaming-events-from-within-tools_8ae5051c-53b9-4c53-87b2-d7263cda3b7b.msgpack.zlib b/docs/cassettes/streaming-events-from-within-tools_8ae5051c-53b9-4c53-87b2-d7263cda3b7b.msgpack.zlib new file mode 100644 index 0000000000..c306d31743 --- /dev/null +++ b/docs/cassettes/streaming-events-from-within-tools_8ae5051c-53b9-4c53-87b2-d7263cda3b7b.msgpack.zlib @@ -0,0 +1 @@ +eNrtFwtwE2W6tQwwUrHMIaiIXaIMWrvJbt5pKRDSlhbbpm3SlpZK+LP5k2y72d3ubtomvYI8xDtBIQg6AyIDlBZqKeUhL0UUFAEPr5VaeQjceALigzeI1JP7N02lFefOu8GZc86dSXb///v+7/36ZzZUQkGkOTa6iWYlKABKQgtx4cwGAVb4oSjNrvdBycu56nKtNvsqv0AfSfBKEi8mqVSAp5UcD1lAKynOp6okVZQXSCr0zTMwTKbOybkCR6NfrlH4oCgCDxQVSdjkGgXFIV6shBaKKnQEoyXoEzEgQIxmMckLMc7tpik4VpGIKQSOgTKiX4SCovYptOPjXJCRtzy8hGs53EeztIzJoj0SvUVJgMCHFm7AiBBtIOo8Uk3yCzIlQmmQ9ziOiUgjBfgwB7efDWsv0/rxOwmrUbDAF0bwQMkRFlXGcEGREmg+gqQoECGSnBYxmTD6wxha7FaMYyHmoz1eCXPTrEvWEWA8AyiIBTj/KKQ1EMuhCwNOzi8pZdo8EBBL5A4xzJ8XkJkFiYaRpXw0/NUtOdKYZj2K2lrZXshvtABdsm4RVNlo3aicswxSEkKtfaq2wQuBCzE5ERVX5+VEKdTc26XrAUVBZGPIUpwLMQit8wRpPhFzQTcDJNiI3MjCsJlCjeUQ8jhg6EpY33Uq1AJ4nqEpIMNVZSLHNkXcjsuy3A5ulD2MoyBhpdB2sxhgKSuSxJypyg2gAGQxUqnTKjUt1bgoAZplUEDhDEBC1fNh+Bs9ATygyhElPBLcofquw809cTgxtDobUFZbL5JAoLyh1UDw6bWbeu4LflaifTDUYMm9nV0EeIudRkmSStOGXoRljULrwq+k8D/Nbe1FBEpCAKc4RCu0gqinOK6chqEjNgfl9lfSrhTtJCuTLuQHysuVnDrdRBbm6yxpmUX2fFCsoQwgr2p8NkEGiAyKLcBJg0ZvNJImNaHWqnFCSShJJYnrCa2RkJ/mbk8wkPVI3tAqLWFYI0CRRzkLZ9UjmSS/OLMOeR3+ZV9DJHdXWp+8FTBD61JRBIR22r3+RExDYBMBiyFmOow0JmlNSToDNiHb3mSJsLH/rMM32AXAim7k9LTuAGugvH4W5UKj5WdDa6ccWshUsvioLuCwmudEiEekCjVNwvO7qhaembqpK45xTvAAlg6G2YbWyhGDqhTNbo6AUW7JJBFz3CciQ+h0zRFItzMbkV4EThI4Qe6oxlEZgQzto5Htwv+RMoliSSNbdtvtGBJXDlkxtJbUEV3PW
z1xBOhD0sjsb1FSm9Dz5s9j/UhNKyOZDNodvfFE2EOmVWqfuO12eITGSkJsqu5GxmlX6MijaOEwaUyEkzLotE6XS+92awid3qTVEnoTMLhIJ2ncLlcbClGRvcdzgoSLkEKNQQqEjiT6QLWcwikaUqfRI12TUbWjGL8L2vzOVE5WQkzGeAEyHHCtp9w4BSgvxLsCLtSQWpxjzs60NNqQkJZw/C88Gh3jQBngcPpSvMFMKxsozvGUcWV2nZ2tspvSKLMt22Z051GZVflsGV/iMfrtai0hJ4BRrUeGNOBkJPrzHDbhScmSYc5KNZkhn0loyrVer9kPA3bLkw52Yi6wUNYsPyxgxgsuh71IZylyZ44vLOYmeGE14DOq0+hqIZ2dZHZMVBYJaqOl0JdVaUbaAMmbokrGUDCiuiumRFICRymBywlhSiK7EyIZc4VtkKLsXWiTsQzUU60sE0jGbLIxIXqjDmBD/SMlB7WPI4u6q0BVXkEaKHIw2XqlnWMITaEz6CskGE2JLr1AKM6oyqmQ0pnUkoKiYnMPI+hM5O1V4Jbo/6VUWybhPTMct4a7IfIjy4ks7XbX26CAsijUSDGc34V6hgDrLel4vrk4tNlEAA3QkaTWbTJp1SaApxXlt3RT+7Ee1MkNpwEwKMYqqdAmryZFkaTVahTJmA+kGPVaggiPGDPquzrge9E34uf2jwo/Meh38+Y8WzZ3dFzczs6iptPznuBH9D0ZLEtYfPcSe2N+Qnnx/IP6zsXHK+yzr59ffM/hOfX5tS3pl08ffH/pMsu79++7S93X+X6mt+Gze9fPTz33w+WtZ04VpYwiJyfuGp2y6Gqw9qZ99xpw+BvVYAns/ENh8jOPrjO9s/uRUczGnI++vfy5sjjxoyGtW8Z93Jow4WL73pIHHxj1d9eD8w9Vq8duqiq8O3bBhvhB1LzhR6cfanj8gsHFjN5RrZ3z9hOzmwY4Y98seL5fdX91nKHPqocHd8QtqffbntBF7StZPKDZ7rh+ZXjfStWQooSTL6nOkbMLliUWnj4UvHqp4mbLlGmGWcGG9ppP9PaOdElxz4UPlx8fTMXGfoJvHX9g5IqBBtfepzsbO7xr6IcGDuOfG/DpuC3Rk1/aSM7n3zOlc0fG9nNWJ3719sfFqzfdPLy/mTn15vE5w+5bsSh9zlPnQs9WLlmxpm11yeR+X762c8blwuQD+8esoj86sawq9h+Hvnxn/9exC0c2B6cHLzrgBwMeVR57vVKVyW/Ys9JzTfXcJ89sjsEezPqhSsOdGpOtyr469mzNuP7S7LRzo4fkl9776bMBZuB5B1i217cnrnXXxBmmzCsH9XcnBtblnIkC8f39UybaVlRN2WKIXv3BoT5zF3EeW3QbeSNGdnBMVEb6hfwRfaKi7uTIetfuOzeyJmI9T7N+humBAlDrQdUPgSIzqYMCzL8aTGl5sFPISA6nu4K3i2KuX23KZTPzcismBGx52ZN+4fwKBI/fh2SSeSlqSrtGxVK0KFV0qVKqqFXIU2Jv+RVOVJfFRAwZkaLR3IK+UDNAtUHsqbmsSS+NHL9E7t+n+9+n+/+z6f7Sr9LX/6cnptsuHwZC/59dPob8m8uH6bd5+agj9YTmN3X70Bvu+O1DbXDpCTVw6oxGnYkyEFqDSUsCgnQBp0sHtPDXv33cganWoDZC5x2cavf/dKrNz+aOEbFoqo1LOpb22Wr/kskbovc8/c3Gew6D+2PTVvqaNz6WIXU075se//UfjWbD1KHDr19/9cQj3y2MfmxjWcwBqSQrvvRiS2fCpQvXLp9f1pm/c1f5jZGVW5e/Vdt+/lUm97t9f/vGqhWcx+8/N8sy4lq/w6ntM1d9uTG4SXn+QGdn2V3BLV+klwyrm1TR9mLDytJFrznz7vvgaug4+UBWWcxJTdTTL58dXhucay2Lmfu9/Sw58vjKo68sjbK7DIWLcv680KtKWG7eM6XV2jG2uvXhbfS+qQP/Onqc1TeqdcFWbPj4P
R9eVF3CF0xdrm+B07JMfa7MKT1grhkwY17soEMHT6kGLR1xllwb/0pyx7eeYy8ciwssfeH69hMLGto/HuR3ji3J2m3O/mJMzHXyec+2iU3jPx+TgX//p2mDS9ffK5RNORD70vttj4M2tlk1dFTH4lJfnna7RDrg5rOtH7YHHgi2vDt0XOWgG68v3UXHz3yRuzlhWt9572Der+LaFm7PGb017SHumsN3ZoR1TsHubY/Mej1ub8WgIcNStu0ZufbKK306d4w46YwNDTzTPntHQlthsOYNdcIxz/SoriH186nJp0uR8/4JoeNGFw== \ No newline at end of file diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 9b07bfad69..4727dccce8 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -85,7 +85,6 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( - [How to stream LLM tokens](streaming-tokens.ipynb) - [How to stream LLM tokens from specific nodes](streaming-specific-nodes.ipynb) - [How to stream events from within a tool](streaming-events-from-within-tools.ipynb) -- [How to stream events from within a tool without LangChain models](streaming-events-from-within-tools-without-langchain.ipynb) - [How to stream from subgraphs](streaming-subgraphs.ipynb) - [How to disable streaming for models that don't support it](disable-streaming.ipynb) diff --git a/docs/docs/how-tos/streaming-events-from-within-tools-without-langchain.ipynb b/docs/docs/how-tos/streaming-events-from-within-tools-without-langchain.ipynb deleted file mode 100644 index face9feb23..0000000000 --- a/docs/docs/how-tos/streaming-events-from-within-tools-without-langchain.ipynb +++ /dev/null @@ -1,372 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "18e6e213-b398-4a7e-b342-ba225e97b424", - "metadata": {}, - "source": [ - "# How to stream events from within a tool (without LangChain LLMs / tools)\n", - "\n", - "\n", - "
\n", - "

Prerequisites

\n", - "

\n", - " This guide assumes familiarity with the following:\n", - "

\n", - "

\n", - "
\n", - "\n", - "In this guide, we will demonstrate how to stream tokens from tools used by a custom ReAct agent, without relying on LangChain’s chat models or tool-calling functionalities. \n", - "\n", - "We will use the OpenAI client library directly for the chat model interaction. The tool execution will be implemented from scratch.\n", - "\n", - "This showcases how LangGraph can be utilized independently of built-in LangChain components like chat models or tools.\n", - "\n", - "## Setup\n", - "\n", - "First, let's install the required packages and set our API keys" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "47f79af8-58d8-4a48-8d9a-88823d88701f", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture --no-stderr\n", - "%pip install -U langgraph openai" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "0cf6b41d-7fcb-40b6-9a72-229cdd00a094", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "\n", - "def _set_env(var: str):\n", - " if not os.environ.get(var):\n", - " os.environ[var] = getpass.getpass(f\"{var}: \")\n", - "\n", - "\n", - "_set_env(\"OPENAI_API_KEY\")" - ] - }, - { - "cell_type": "markdown", - "id": "d8df7b58", - "metadata": {}, - "source": [ - "
\n", - "

Set up LangSmith for LangGraph development

\n", - "

\n", - " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", - "

\n", - "
" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "7d766c7d-34ea-455b-8bcb-f2f12d100e1d", - "metadata": {}, - "source": [ - "## Define the graph\n", - "\n", - "### Define a node that will call OpenAI API" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "d59234f9-173e-469d-a725-c13e0979663e", - "metadata": {}, - "outputs": [], - "source": [ - "from openai import AsyncOpenAI\n", - "from langchain_core.language_models.chat_models import ChatGenerationChunk\n", - "from langchain_core.messages import AIMessageChunk\n", - "from langchain_core.runnables.config import (\n", - " ensure_config,\n", - " get_callback_manager_for_config,\n", - ")\n", - "\n", - "openai_client = AsyncOpenAI()\n", - "# define tool schema for openai tool calling\n", - "\n", - "tool = {\n", - " \"type\": \"function\",\n", - " \"function\": {\n", - " \"name\": \"get_items\",\n", - " \"description\": \"Use this tool to look up which items are in the given place.\",\n", - " \"parameters\": {\n", - " \"type\": \"object\",\n", - " \"properties\": {\"place\": {\"type\": \"string\"}},\n", - " \"required\": [\"place\"],\n", - " },\n", - " },\n", - "}\n", - "\n", - "\n", - "async def call_model(state, config=None):\n", - " config = ensure_config(config | {\"tags\": [\"agent_llm\"]})\n", - " callback_manager = get_callback_manager_for_config(config)\n", - " messages = state[\"messages\"]\n", - "\n", - " llm_run_manager = callback_manager.on_chat_model_start({}, [messages])[0]\n", - " response = await openai_client.chat.completions.create(\n", - " messages=messages, model=\"gpt-3.5-turbo\", tools=[tool], stream=True\n", - " )\n", - "\n", - " response_content = \"\"\n", - " role = None\n", - "\n", - " tool_call_id = None\n", - " tool_call_function_name = None\n", - " tool_call_function_arguments = \"\"\n", - " async for chunk in response:\n", - " delta = chunk.choices[0].delta\n", - " if delta.role is not None:\n", - " role = delta.role\n", - "\n", - " if 
delta.content:\n", - " response_content += delta.content\n", - " llm_run_manager.on_llm_new_token(delta.content)\n", - "\n", - " if delta.tool_calls:\n", - " # note: for simplicity we're only handling a single tool call here\n", - " if delta.tool_calls[0].function.name is not None:\n", - " tool_call_function_name = delta.tool_calls[0].function.name\n", - " tool_call_id = delta.tool_calls[0].id\n", - "\n", - " # note: we're wrapping the tools calls in ChatGenerationChunk so that the events from .astream_events in the graph can render tool calls correctly\n", - " tool_call_chunk = ChatGenerationChunk(\n", - " message=AIMessageChunk(\n", - " content=\"\",\n", - " additional_kwargs={\"tool_calls\": [delta.tool_calls[0].dict()]},\n", - " )\n", - " )\n", - " llm_run_manager.on_llm_new_token(\"\", chunk=tool_call_chunk)\n", - " tool_call_function_arguments += delta.tool_calls[0].function.arguments\n", - "\n", - " if tool_call_function_name is not None:\n", - " tool_calls = [\n", - " {\n", - " \"id\": tool_call_id,\n", - " \"function\": {\n", - " \"name\": tool_call_function_name,\n", - " \"arguments\": tool_call_function_arguments,\n", - " },\n", - " \"type\": \"function\",\n", - " }\n", - " ]\n", - " else:\n", - " tool_calls = None\n", - "\n", - " response_message = {\n", - " \"role\": role,\n", - " \"content\": response_content,\n", - " \"tool_calls\": tool_calls,\n", - " }\n", - " return {\"messages\": [response_message]}" - ] - }, - { - "cell_type": "markdown", - "id": "3a3877e8-8ace-40d5-ad04-cbf21c6f3250", - "metadata": {}, - "source": [ - "### Define our tools and a tool-calling node" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "b90941d8-afe4-42ec-9262-9c3b87c3b1ec", - "metadata": {}, - "outputs": [], - "source": [ - "import json\n", - "from langchain_core.callbacks import adispatch_custom_event\n", - "\n", - "\n", - "async def get_items(place: str) -> str:\n", - " \"\"\"Use this tool to look up which items are in the given place.\"\"\"\n", 
- "\n", - " # this can be replaced with any actual streaming logic that you might have\n", - " def stream(place: str):\n", - " if \"bed\" in place: # For under the bed\n", - " yield from [\"socks\", \"shoes\", \"dust bunnies\"]\n", - " elif \"shelf\" in place: # For 'shelf'\n", - " yield from [\"books\", \"penciles\", \"pictures\"]\n", - " else: # if the agent decides to ask about a different place\n", - " yield \"cat snacks\"\n", - "\n", - " tokens = []\n", - " for token in stream(place):\n", - " await adispatch_custom_event(\n", - " # this will allow you to filter events by name\n", - " \"tool_call_token_stream\",\n", - " {\n", - " \"function_name\": \"get_items\",\n", - " \"arguments\": {\"place\": place},\n", - " \"tool_output_token\": token,\n", - " },\n", - " # this will allow you to filter events by tags\n", - " config={\"tags\": [\"tool_call\"]},\n", - " )\n", - " tokens.append(token)\n", - "\n", - " return \", \".join(tokens)\n", - "\n", - "\n", - "# define mapping to look up functions when running tools\n", - "function_name_to_function = {\"get_items\": get_items}\n", - "\n", - "\n", - "async def call_tools(state):\n", - " messages = state[\"messages\"]\n", - "\n", - " tool_call = messages[-1][\"tool_calls\"][0]\n", - " function_name = tool_call[\"function\"][\"name\"]\n", - " function_arguments = tool_call[\"function\"][\"arguments\"]\n", - " arguments = json.loads(function_arguments)\n", - "\n", - " function_response = await function_name_to_function[function_name](**arguments)\n", - " tool_message = {\n", - " \"tool_call_id\": tool_call[\"id\"],\n", - " \"role\": \"tool\",\n", - " \"name\": function_name,\n", - " \"content\": function_response,\n", - " }\n", - " return {\"messages\": [tool_message]}" - ] - }, - { - "cell_type": "markdown", - "id": "6685898c-9a1c-4803-a492-bd70574ebe38", - "metadata": {}, - "source": [ - "### Define our graph" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "228260be-1f9a-4195-80e0-9604f8a5dba6", - 
"metadata": {}, - "outputs": [], - "source": [ - "import operator\n", - "from typing import Annotated, Literal\n", - "from typing_extensions import TypedDict\n", - "\n", - "from langgraph.graph import StateGraph, END, START\n", - "\n", - "\n", - "class State(TypedDict):\n", - " messages: Annotated[list, operator.add]\n", - "\n", - "\n", - "def should_continue(state) -> Literal[\"tools\", END]:\n", - " messages = state[\"messages\"]\n", - " last_message = messages[-1]\n", - " if last_message[\"tool_calls\"]:\n", - " return \"tools\"\n", - " return END\n", - "\n", - "\n", - "workflow = StateGraph(State)\n", - "workflow.add_edge(START, \"model\")\n", - "workflow.add_node(\"model\", call_model) # i.e. our \"agent\"\n", - "workflow.add_node(\"tools\", call_tools)\n", - "workflow.add_conditional_edges(\"model\", should_continue)\n", - "workflow.add_edge(\"tools\", \"model\")\n", - "graph = workflow.compile()" - ] - }, - { - "cell_type": "markdown", - "id": "d046e2ef-f208-4831-ab31-203b2e75a49a", - "metadata": {}, - "source": [ - "## Stream tokens from within the tool\n", - "\n", - "Here, we'll use the `astream_events` API to stream back individual events. Please see [astream_events](https://python.langchain.com/docs/concepts/#astream_events) for more details." 
- ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "45c96a79-4147-42e3-89fd-d942b2b49f6c", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tool token socks\n", - "Tool token shoes\n", - "Tool token dust bunnies\n" - ] - } - ], - "source": [ - "async for event in graph.astream_events(\n", - " {\"messages\": [{\"role\": \"user\", \"content\": \"what's in the bedroom\"}]}, version=\"v2\"\n", - "):\n", - " tags = event.get(\"tags\", [])\n", - " if event[\"event\"] == \"on_custom_event\" and \"tool_call\" in tags:\n", - " print(\"Tool token\", event[\"data\"][\"tool_output_token\"])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb index 522cc61f32..2ad9beb1c0 100644 --- a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb +++ b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb @@ -3,50 +3,82 @@ { "attachments": {}, "cell_type": "markdown", - "id": "04b012ac-e0b5-483e-a645-d13d0e215aad", + "id": "695d935e-b4fe-45a6-a061-a66d32cb832b", "metadata": {}, "source": [ "# How to stream data from within a tool\n", "\n", - "
\n", - "

Prerequisites

\n", - "

\n", - " This guide assumes familiarity with the following:\n", - "

\n", - "

\n", - "
\n", + "!!! info \"Prerequisites\"\n", + "\n", + " This guide assumes familiarity with the following:\n", + " \n", + " - [Streaming](../../concepts/streaming/)\n", + " - [Chat Models](https://python.langchain.com/docs/concepts/chat_models/)\n", + " - [Tools](https://python.langchain.com/docs/concepts/tools/)\n", + "\n", + "If your graph calls tools that use LLMs or any other streaming APIs, you might want to surface partial results during the execution of the tool, especially if the tool takes a longer time to run.\n", + "\n", + "1. To stream LLM tokens generated by a tool calling an LLM you can use [`stream_mode=\"messages\"`](../streaming#stream_modemessages):\n", + "\n", + " ```python\n", + " from langgraph.graph import StateGraph, MessagesState\n", + " from langchain_openai import ChatOpenAI\n", + " \n", + " model = ChatOpenAI()\n", + " \n", + " def tool(tool_arg: str):\n", + " model.invoke(tool_arg)\n", + " ...\n", + " \n", + " def call_tools(state: MessagesState):\n", + " tool_call = get_tool_call(state)\n", + " tool_result = tool(**tool_call[\"args\"])\n", + " ...\n", + " \n", + " graph = (\n", + " StateGraph(MessagesState)\n", + " .add_node(call_tools)\n", + " ...\n", + " .compile()\n", + " \n", + " for msg, metadata in graph.stream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"messages\"\n", + " ):\n", + " print(msg)\n", + " ```\n", "\n", - "If your graph involves tools that invoke LLMs (or any other LangChain `Runnable` objects like other graphs, `LCEL` chains, or retrievers), you might want to surface partial results during the execution of the tool, especially if the tool takes a longer time to run.\n", + "2. To stream **arbitrary** data from inside a tool you can use [`stream_mode=\"custom\"`](../streaming#stream_modemessages) and `get_stream_writer()`:\n", "\n", - "A common scenario is streaming LLM tokens generated by a tool calling an LLM, though this applies to any use of Runnable objects. 
\n", + " ```python\n", + " # highlight-next-line\n", + " from langgraph.utils.config import get_stream_writer\n", + " \n", + " def tool(tool_arg: str):\n", + " writer = get_stream_writer()\n", + " for chunk in custom_data_stream():\n", + " # stream any arbitrary data\n", + " # highlight-next-line\n", + " writer(chunk)\n", + " ...\n", + " \n", + " for chunk in graph.stream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"custom\"\n", + " ):\n", + " print(chunk)\n", + " ```\n", "\n", - "This guide shows how to stream data from within a tool using the `astream` API with `stream_mode=\"messages\"` and also the more granular `astream_events` API. The `astream` API should be sufficient for most use cases.\n", + "!!! note \"Using without LangChain\"\n", + "\n", + " If you need to stream data from inside tools **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom). Check out the [example below](#example-without-langchain) to learn more.\n", + "\n", + "!!! warning \"Async in Python < 3.11\"\n", + " \n", + " When using Python < 3.11 with async code, please ensure you manually pass the `RunnableConfig` through to the chat model when invoking it like so: `model.ainvoke(..., config)`.\n", + " The stream method collects all events from your nested code using a streaming tracer passed as a callback. In 3.11 and above, this is automatically handled via [contextvars](https://docs.python.org/3/library/contextvars.html); prior to 3.11, [asyncio's tasks](https://docs.python.org/3/library/asyncio-task.html#asyncio.create_task) lacked proper `contextvar` support, meaning that the callbacks will only propagate if you manually pass the config through. 
We do this in the `call_model` function below.\n", "\n", "## Setup\n", "\n", @@ -55,8 +87,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "47f79af8-58d8-4a48-8d9a-88823d88701f", + "execution_count": 1, + "id": "b364dfe2-010b-4588-8489-fb4d8be1f200", "metadata": {}, "outputs": [], "source": [ @@ -66,10 +98,18 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 2, "id": "0cf6b41d-7fcb-40b6-9a72-229cdd00a094", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdin", + "output_type": "stream", + "text": [ + "OPENAI_API_KEY: ········\n" + ] + } + ], "source": [ "import getpass\n", "import os\n", @@ -103,34 +143,20 @@ "source": [ "## Define the graph\n", "\n", - "We'll use a prebuilt ReAct agent for this guide" - ] - }, - { - "cell_type": "markdown", - "id": "9378fd4a-69e4-49e2-b34c-a98a0505ea35", - "metadata": {}, - "source": [ - "
\n", - "

ASYNC IN PYTHON<=3.10

\n", - "

\n", - "Any Langchain `RunnableLambda`, a `RunnableGenerator`, or `Tool` that invokes other runnables and is running async in python<=3.10, will have to propagate callbacks to child objects **manually**. This is because LangChain cannot automatically propagate callbacks to child objects in this case.\n", - " \n", - "This is a common reason why you may fail to see events being emitted from custom runnables or tools.\n", - "

\n", - "
" + "We'll use a [prebuilt ReAct agent][langgraph.prebuilt.chat_agent_executor.create_react_agent] for this guide:" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 3, "id": "f1975577-a485-42bd-b0f1-d3e987faf52b", "metadata": {}, "outputs": [], "source": [ "from langchain_core.callbacks import Callbacks\n", - "from langchain_core.messages import HumanMessage\n", "from langchain_core.tools import tool\n", + "from langchain_core.messages import AIMessageChunk\n", + "from langchain_core.runnables import RunnableConfig\n", "\n", "from langgraph.prebuilt import create_react_agent\n", "from langchain_openai import ChatOpenAI\n", @@ -139,25 +165,32 @@ "@tool\n", "async def get_items(\n", " place: str,\n", - " callbacks: Callbacks, # <--- Manually accept callbacks (needed for Python <= 3.10)\n", + " # Manually accept config (needed for Python <= 3.10)\n", + " # highlight-next-line\n", + " config: RunnableConfig,\n", ") -> str:\n", - " \"\"\"Use this tool to look up which items are in the given place.\"\"\"\n", - " # Attention when using async, you should be invoking the LLM using ainvoke!\n", - " # If you fail to do so, streaming will not WORK.\n", - " return await llm.ainvoke(\n", + " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", + " # Attention: when using async, you should be invoking the LLM using ainvoke!\n", + " # If you fail to do so, streaming will NOT work.\n", + " response = await llm.ainvoke(\n", " [\n", " {\n", " \"role\": \"user\",\n", - " \"content\": f\"Can you tell me what kind of items i might find in the following place: '{place}'. \"\n", - " \"List at least 3 such items separating them by a comma. And include a brief description of each item..\",\n", + " \"content\": (\n", + " f\"Can you tell me what kind of items i might find in the following place: '{place}'. \"\n", + " \"List at least 3 such items separating them by a comma. 
And include a brief description of each item.\"\n", + " ),\n", " }\n", " ],\n", - " {\"callbacks\": callbacks},\n", + " # highlight-next-line\n", + " config,\n", " )\n", + " return response.content\n", "\n", "\n", - "llm = ChatOpenAI(model_name=\"gpt-4o\")\n", + "llm = ChatOpenAI(model_name=\"gpt-4o-mini\")\n", "tools = [get_items]\n", + "# contains `agent` (tool-calling LLM) and `tools` (tool executor) nodes\n", "agent = create_react_agent(llm, tools=tools)" ] }, @@ -166,88 +199,284 @@ "id": "15cb55cc-b59d-4743-b6a3-13db75414d2c", "metadata": {}, "source": [ - "## Using stream_mode=\"messages\"\n", - "\n", - "Using `stream_mode=\"messages\"` is a good option if you don't have any complex LCEL logic inside of nodes (or you don't need super granular progress from within the LCEL chain)." + "## Streaming LLM tokens" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 4, "id": "4c9cdad3-3e9a-444f-9d9d-eae20b8d3486", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Certainly|!| Here| are| three| items| you| might| find| in| a| bedroom|:\n", + "\n", + "|1|.| **|Bed|**|:| A| bed| is| a| piece| of| furniture| used| for| sleeping| or| resting|.| It| typically| consists| of| a| mattress| supported| by| a| frame| and| may| include| bedding| such| as| sheets|,| blankets|,| and| pillows| for| comfort|.\n", + "\n", + "|2|.| **|D|resser|**|:| A| dresser| is| a| storage| piece| of| furniture| with| drawers|,| used| for| organizing| clothing| and| personal| items|.| It| often| features| a| flat| surface| on| top|,| which| can| be| used| for| displaying| decorative| items| or| personal| care| products|.\n", + "\n", + "|3|.| **|Night|stand|**|:| A| night|stand| is| a| small| table| located| beside| the| bed|,| providing| a| convenient| place| to| keep| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| glasses|.| It| often| includes| one| or| more| drawers| for| additional| storage|.|" + ] + 
} + ], "source": [ - "final_message = \"\"\n", + "inputs = {\n", + " \"messages\": [ # noqa\n", + " {\"role\": \"user\", \"content\": \"what items are in the bedroom?\"}\n", + " ]\n", + "}\n", "async for msg, metadata in agent.astream(\n", - " {\"messages\": [(\"human\", \"what items are on the shelf?\")]}, stream_mode=\"messages\"\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"messages\",\n", "):\n", - " # Stream all messages from the tool node\n", " if (\n", - " msg.content\n", - " and not isinstance(msg, HumanMessage)\n", + " isinstance(msg, AIMessageChunk)\n", + " and msg.content\n", + " # Stream all messages from the tool node\n", + " # highlight-next-line\n", " and metadata[\"langgraph_node\"] == \"tools\"\n", - " and not msg.name\n", " ):\n", - " print(msg.content, end=\"|\", flush=True)\n", - " # Final message should come from our agent\n", - " if msg.content and metadata[\"langgraph_node\"] == \"agent\":\n", - " final_message += msg.content" + " print(msg.content, end=\"|\", flush=True)" ] }, { - "attachments": {}, "cell_type": "markdown", - "id": "81656193-1cbf-4721-a8df-0e316fd510e5", + "id": "6d8fa9fc-19af-47d6-9031-ee1720c51aa2", "metadata": {}, "source": [ - "## Using stream events API\n", + "## Streaming custom data" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "38eaf453-9773-424d-a110-9e1038a69805", + "metadata": {}, + "outputs": [], + "source": [ + "from langgraph.utils.config import get_stream_writer\n", "\n", - "For simplicity, the `get_items` tool doesn't use any complex LCEL logic inside it -- it only invokes an LLM.\n", "\n", - "However, if the tool were more complex (e.g., using a RAG chain inside it), and you wanted to see more granular events from within the chain, then you can use the astream events API.\n", + "@tool\n", + "async def get_items(place: str) -> str:\n", + " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", + " # highlight-next-line\n", + " 
writer = get_stream_writer()\n", "\n", - "The example below only illustrates how to invoke the API.\n", + " # this can be replaced with any actual streaming logic that you might have\n", + " items = [\"books\", \"penciles\", \"pictures\"]\n", + " for chunk in items:\n", + " # highlight-next-line\n", + " writer({\"custom_tool_data\": chunk})\n", "\n", - "
\n", - "

Use async for the astream events API

\n", - "

\n", - " You should generally be using `async` code (e.g., using `ainvoke` to invoke the llm) to be able to leverage the astream events API properly.\n", - "

\n", - "
" + " return \", \".join(items)\n", + "\n", + "\n", + "tools = [get_items]\n", + "# contains `agent` (tool-calling LLM) and `tools` (tool executor) nodes\n", + "agent = create_react_agent(llm, tools=tools)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "8ae5051c-53b9-4c53-87b2-d7263cda3b7b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'custom_tool_data': 'books'}\n", + "{'custom_tool_data': 'penciles'}\n", + "{'custom_tool_data': 'pictures'}\n" + ] + } + ], + "source": [ + "inputs = {\n", + " \"messages\": [ # noqa\n", + " {\"role\": \"user\", \"content\": \"what items are in the office?\"}\n", + " ]\n", + "}\n", + "async for chunk in agent.astream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"custom\",\n", + "):\n", + " print(chunk)" + ] + }, + { + "cell_type": "markdown", + "id": "d598d7e2-617d-4c06-bc9a-6a03d5f58499", + "metadata": {}, + "source": [ + "## Example without LangChain" + ] + }, + { + "cell_type": "markdown", + "id": "780ddcb6-63a7-4c83-a739-bafbe3cd135a", + "metadata": {}, + "source": [ + "You can also stream data from within tool invocations **without using LangChain**. Below example demonstrates how to do it for a graph with a single tool-executing node. We'll leave it as an exercise for the reader to [implement ReAct agent from scratch](../react-agent-from-scratch) without using LangChain." 
] }, { "cell_type": "code", "execution_count": 7, - "id": "c3acdec9-0a24-4348-921e-435c8ea6f9fe", + "id": "3e8be67f-4bb8-4f14-9fdb-fc60340f3930", + "metadata": {}, + "outputs": [], + "source": [ + "import operator\n", + "import json\n", + "\n", + "from typing import TypedDict\n", + "from typing_extensions import Annotated\n", + "from langgraph.graph import StateGraph, START\n", + "\n", + "from openai import AsyncOpenAI\n", + "\n", + "openai_client = AsyncOpenAI()\n", + "model_name = \"gpt-4o-mini\"\n", + "\n", + "\n", + "async def stream_tokens(model_name: str, messages: list[dict]):\n", + " response = await openai_client.chat.completions.create(\n", + " messages=messages, model=model_name, stream=True\n", + " )\n", + " role = None\n", + " async for chunk in response:\n", + " delta = chunk.choices[0].delta\n", + "\n", + " if delta.role is not None:\n", + " role = delta.role\n", + "\n", + " if delta.content:\n", + " yield {\"role\": role, \"content\": delta.content}\n", + "\n", + "\n", + "# this is our tool\n", + "async def get_items(place: str) -> str:\n", + " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", + " # highlight-next-line\n", + " writer = get_stream_writer()\n", + " response = \"\"\n", + " async for msg_chunk in stream_tokens(\n", + " model_name,\n", + " [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": (\n", + " \"Can you tell me what kind of items \"\n", + " f\"i might find in the following place: '{place}'. \"\n", + " \"List at least 3 such items separating them by a comma. 
\"\n", + " \"And include a brief description of each item.\"\n", + " ),\n", + " }\n", + " ],\n", + " ):\n", + " response += msg_chunk[\"content\"]\n", + " # highlight-next-line\n", + " writer(msg_chunk)\n", + "\n", + " return response\n", + "\n", + "\n", + "class State(TypedDict):\n", + " messages: Annotated[list[dict], operator.add]\n", + "\n", + "\n", + "# this is the tool-calling graph node\n", + "async def call_tool(state: State):\n", + " ai_message = state[\"messages\"][-1]\n", + " tool_call = ai_message[\"tool_calls\"][-1]\n", + "\n", + " function_name = tool_call[\"function\"][\"name\"]\n", + " if function_name != \"get_items\":\n", + " raise ValueError(f\"Tool {function_name} not supported\")\n", + "\n", + " function_arguments = tool_call[\"function\"][\"arguments\"]\n", + " arguments = json.loads(function_arguments)\n", + "\n", + " function_response = await get_items(**arguments)\n", + " tool_message = {\n", + " \"tool_call_id\": tool_call[\"id\"],\n", + " \"role\": \"tool\",\n", + " \"name\": function_name,\n", + " \"content\": function_response,\n", + " }\n", + " return {\"messages\": [tool_message]}\n", + "\n", + "\n", + "graph = (\n", + " StateGraph(State) # noqa\n", + " .add_node(call_tool)\n", + " .add_edge(START, \"call_tool\")\n", + " .compile()\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "4e712d12-841c-4eac-a4d8-d01c73c86c8c", + "metadata": {}, + "source": [ + "Let's now invoke our graph with an AI message that contains a tool call:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "2c30c7b4-62df-4855-8219-d5e1a1a09be9", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "|In| a| bedroom|,| you| might| find| the| following| items|:\n", + "Sure|!| Here| are| three| common| items| you| might| find| in| a| bedroom|:\n", "\n", - "|1|.| **|Bed|**|:| The| central| piece| of| furniture| in| a| bedroom|,| typically| consisting| of| a| mattress| on| a| frame|,| where| people| sleep|.| It| 
often| includes| bedding| such| as| sheets|,| blankets|,| and| pillows| for| comfort|.\n", + "|1|.| **|Bed|**|:| A| piece| of| furniture| typically| consisting| of| a| mattress| on| a| frame|,| providing| a| comfortable| place| for| sleep|.| Beds| can| vary| in| size| (|t|win|,| full|,| queen|,| king|)| and| style| (|platform|,| canopy|,| adjustable|).\n", "\n", - "|2|.| **|Ward|robe|**|:| A| large|,| tall| cupboard| or| fre|estanding| piece| of| furniture| used| for| storing| clothes|.| It| may| have| hanging| space|,| shelves|,| and| sometimes| drawers| for| organizing| garments| and| accessories|.\n", + "|2|.| **|Night|stand|**|:| A| small| table| or| cabinet| usually| positioned| next| to| the| bed|,| used| for| holding| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| personal| belongings|.| Night|stands| often| have| drawers| or| shelves| for| added| storage|.\n", "\n", - "|3|.| **|Night|stand|**|:| A| small| table| or| cabinet| placed| beside| the| bed|,| used| for| holding| items| like| a| lamp|,| alarm| clock|,| books|,| or| personal| belongings| that| might| be| needed| during| the| night| or| early| morning|.||" + "|3|.| **|D|resser|**|:| A| larger| piece| of| furniture| with| multiple| drawers| used| for| storing| clothes|,| accessories|,| and| other| personal| items|.| Dress|ers| often| have| a| flat| top| where| decorative| items|,| mirrors|,| or| personal| effects| can| be| displayed|.|" ] } ], "source": [ - "from langchain_core.messages import HumanMessage\n", + "inputs = {\n", + " \"messages\": [\n", + " {\n", + " \"content\": None,\n", + " \"role\": \"assistant\",\n", + " \"tool_calls\": [\n", + " {\n", + " \"id\": \"1\",\n", + " \"function\": {\n", + " \"arguments\": '{\"place\":\"bedroom\"}',\n", + " \"name\": \"get_items\",\n", + " },\n", + " \"type\": \"function\",\n", + " }\n", + " ],\n", + " }\n", + " ]\n", + "}\n", "\n", - "async for event in agent.astream_events(\n", - " {\"messages\": [{\"role\": \"user\", \"content\": \"what's in 
the bedroom.\"}]}, version=\"v2\"\n", + "async for chunk in graph.astream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"custom\",\n", "):\n", - " if (\n", - " event[\"event\"] == \"on_chat_model_stream\"\n", - " and event[\"metadata\"].get(\"langgraph_node\") == \"tools\"\n", - " ):\n", - " print(event[\"data\"][\"chunk\"].content, end=\"|\", flush=True)" + " print(chunk[\"content\"], end=\"|\", flush=True)" ] } ], @@ -267,7 +496,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/docs/docs/how-tos/streaming-events-from-within-tools.md b/docs/docs/how-tos/streaming-events-from-within-tools.md new file mode 100644 index 0000000000..57ba5d7276 --- /dev/null +++ b/docs/docs/how-tos/streaming-events-from-within-tools.md @@ -0,0 +1,2 @@ +WARNING: DO NOT MODIFY/DELETE +This is a dummy file needed for mkdocs-redirects, as it is expecting redirects to be markdown files diff --git a/docs/docs/how-tos/streaming-subgraphs.ipynb b/docs/docs/how-tos/streaming-subgraphs.ipynb index 2ef34464a6..6978cbc900 100644 --- a/docs/docs/how-tos/streaming-subgraphs.ipynb +++ b/docs/docs/how-tos/streaming-subgraphs.ipynb @@ -10,7 +10,7 @@ "\n", " This guide assumes familiarity with the following:\n", " \n", - " - [Subgraphs](../..//concepts/low_level/#subgraphs)\n", + " - [Subgraphs](../../concepts/low_level/#subgraphs)\n", " - [Chat Models](https://python.langchain.com/docs/concepts/chat_models/)\n", "\n", "If you have created a graph with [subgraphs](../subgraph), you may wish to stream outputs from those subgraphs. 
To do so, you can specify `subgraphs=True` in parent graph's `.stream()` method:\n", diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index 21915daca3..b92db68c81 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -44,9 +44,9 @@ "\n", " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", "\n", - "!!! warning \"Note on Python < 3.11\"\n", + "!!! warning \"Async in Python < 3.11\"\n", " \n", - " When using python 3.8, 3.9, or 3.10, please ensure you manually pass the `RunnableConfig` through to the chat model when invoking it like so: `model.ainvoke(..., config)`.\n", + " When using Python < 3.11 with async code, please ensure you manually pass the `RunnableConfig` through to the chat model when invoking it like so: `model.ainvoke(..., config)`.\n", " The stream method collects all events from your nested code using a streaming tracer passed as a callback. In 3.11 and above, this is automatically handled via [contextvars](https://docs.python.org/3/library/contextvars.html); prior to 3.11, [asyncio's tasks](https://docs.python.org/3/library/asyncio-task.html#asyncio.create_task) lacked proper `contextvar` support, meaning that the callbacks will only propagate if you manually pass the config through. We do this in the `call_model` function below." 
] }, @@ -329,11 +329,16 @@ " response = await openai_client.chat.completions.create(\n", " messages=messages, model=model_name, stream=True\n", " )\n", + "\n", + " role = None\n", " async for chunk in response:\n", " delta = chunk.choices[0].delta\n", "\n", + " if delta.role is not None:\n", + " role = delta.role\n", + "\n", " if delta.content:\n", - " yield {\"role\": delta.role, \"content\": delta.content}\n", + " yield {\"role\": role, \"content\": delta.content}\n", "\n", "\n", "# highlight-next-line\n", diff --git a/docs/docs/how-tos/streaming-tokens.md b/docs/docs/how-tos/streaming-tokens.md new file mode 100644 index 0000000000..57ba5d7276 --- /dev/null +++ b/docs/docs/how-tos/streaming-tokens.md @@ -0,0 +1,2 @@ +WARNING: DO NOT MODIFY/DELETE +This is a dummy file needed for mkdocs-redirects, as it is expecting redirects to be markdown files diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index a4739d22c0..87805b305b 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -65,6 +65,7 @@ plugins: 'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple-streaming-modes' 'how-tos/streaming-tokens-without-langchain.md': 'how-tos/streaming-tokens.md#example-without-langchain' 'how-tos/streaming-from-final-node.md': 'how-tos/streaming-specific-nodes.md' + 'how-tos/streaming-events-from-within-tools-without-langchain.md': 'how-tos/streaming-events-from-within-tools.md#example-without-langchain' # cloud redirects 'cloud/index.md': 'concepts/index.md#langgraph-platform' 'cloud/how-tos/index.md': 'how-tos/index.md#langgraph-platform' @@ -150,7 +151,6 @@ nav: - how-tos/streaming-tokens.ipynb - how-tos/streaming-specific-nodes.ipynb - how-tos/streaming-events-from-within-tools.ipynb - - how-tos/streaming-events-from-within-tools-without-langchain.ipynb - how-tos/streaming-subgraphs.ipynb - how-tos/disable-streaming.ipynb - Tool calling: From d8245768c1f3eeff3434629ca80b8b4e96b30c65 Mon Sep 17 00:00:00 2001 From: vbarda Date: Thu, 30 Jan 2025 16:00:43 -0500 Subject: 
[PATCH 09/14] cr --- docs/docs/how-tos/streaming-tokens.ipynb | 16 ++++---- docs/docs/how-tos/streaming.ipynb | 50 +++++++++++++++++++++--- 2 files changed, 52 insertions(+), 14 deletions(-) diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index b92db68c81..58775aa22b 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -113,30 +113,28 @@ }, { "cell_type": "markdown", - "id": "8a592001", + "id": "e03c5094-9297-4d19-a04e-3eedc75cefb4", "metadata": {}, "source": [ + "!!! note Manual Callback Propagation\n", "\n", - "After we've done this, we should make sure the model knows that it has these tools available to call.\n", - "We can do this by converting the LangChain tools into the format for function calling, and then bind them to the model class.\n" + " Note that in `call_model(state: State, config: RunnableConfig):` below, we a) accept the [`RunnableConfig`](https://python.langchain.com/api_reference/core/runnables/langchain_core.runnables.config.RunnableConfig.html#langchain_core.runnables.config.RunnableConfig) in the node function and b) pass it in as the second arg for `model.ainvoke(..., config)`. This is optional for python >= 3.11." ] }, { "cell_type": "markdown", - "id": "e03c5094-9297-4d19-a04e-3eedc75cefb4", + "id": "ad2c85b6-28f8-4c7f-843a-c05cb7fd7187", "metadata": {}, "source": [ - "!!! note Manual Callback Propagation\n", - "\n", - " Note that in `call_model(state: State, config: RunnableConfig):` below, we a) accept the [`RunnableConfig`](https://python.langchain.com/api_reference/core/runnables/langchain_core.runnables.config.RunnableConfig.html#langchain_core.runnables.config.RunnableConfig) in the node function and b) pass it in as the second arg for `model.ainvoke(..., config)`. This is optional for python >= 3.11." 
+ "## Example" ] }, { "cell_type": "markdown", - "id": "ad2c85b6-28f8-4c7f-843a-c05cb7fd7187", + "id": "afcbdd41-dff8-4118-8901-a619f91f3feb", "metadata": {}, "source": [ - "## Example" + "Below we demonstrate an example with two LLM calls in a single node." ] }, { diff --git a/docs/docs/how-tos/streaming.ipynb b/docs/docs/how-tos/streaming.ipynb index 6c577fc580..671742c407 100644 --- a/docs/docs/how-tos/streaming.ipynb +++ b/docs/docs/how-tos/streaming.ipynb @@ -27,11 +27,11 @@ "- `\"values\"`: Emit all values in the state after each step.\n", "- `\"updates\"`: Emit only the node or task names and updates returned by the nodes or tasks after each step.\n", " If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately.\n", - "- `\"custom\"`: Emit custom data using from inside nodes or tasks using `StreamWriter`.\n", - "- `\"messages\"`: Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks.\n", + "- `\"custom\"`: Emit custom data from inside nodes or tasks using `StreamWriter`.\n", + "- [`\"messages\"`](../streaming-tokens): Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks.\n", "- `\"debug\"`: Emit debug events with as much information as possible for each step.\n", "\n", - "You can stream outputs from the graph by using `.stream()` / `.astream()` methods:\n", + "You can stream outputs from the graph by using `graph.stream(..., stream_mode=)` method, e.g.:\n", "\n", "=== \"Sync\"\n", "\n", @@ -181,6 +181,14 @@ "## stream_mode=\"values\"" ] }, + { + "cell_type": "markdown", + "id": "d1ed60d4-cf78-4d4d-a660-6879539e168f", + "metadata": {}, + "source": [ + "Use this to stream **all values** in the state after each step." 
+ ] + }, { "cell_type": "code", "execution_count": 4, @@ -214,6 +222,14 @@ "## stream_mode=\"updates\"" ] }, + { + "cell_type": "markdown", + "id": "44c55326-d077-4583-ae5b-396f45daf21c", + "metadata": {}, + "source": [ + "Use this to stream only the **state updates** returned by the nodes after each step. The streamed outputs include the name of the node as well as the update." + ] + }, { "cell_type": "code", "execution_count": 5, @@ -246,6 +262,14 @@ "## stream_mode=\"debug\"" ] }, + { + "cell_type": "markdown", + "id": "94690715-f86c-42f6-be2d-4df82f6f9a96", + "metadata": {}, + "source": [ + "Use this to stream **debug events** with as much information as possible for each step. Includes information about tasks that were scheduled to be executed as well as the results of the task executions." + ] + }, { "cell_type": "code", "execution_count": 6, @@ -277,7 +301,7 @@ "id": "6791da60-0513-43e6-b445-788dd81683bb", "metadata": {}, "source": [ - "## stream_mode=\"messages\"" + "## [stream_mode=\"messages\"](../streaming-tokens)" ] }, { @@ -285,7 +309,7 @@ "id": "1f45d68b-f7ca-4012-96cc-d276a143f571", "metadata": {}, "source": [ - "Let's modify the above example to include LLM calls:" + "Use this to stream **LLM messages token-by-token** together with metadata for any LLM invocations inside nodes or tasks. Let's modify the above example to include LLM calls:" ] }, { @@ -388,6 +412,14 @@ "## stream_mode=\"custom\"" ] }, + { + "cell_type": "markdown", + "id": "e9ca56cc-d36e-4061-b1f6-9ade4e3e00a0", + "metadata": {}, + "source": [ + "Use this to stream custom data from inside nodes using [`StreamWriter`][langgraph.types.StreamWriter]." + ] + }, { "cell_type": "code", "execution_count": 10, @@ -446,6 +478,14 @@ "## Multiple streaming modes" ] }, + { + "cell_type": "markdown", + "id": "01ff946a-f38d-42ad-bc71-a2621fab1b6c", + "metadata": {}, + "source": [ + "Use this to combine multiple streaming modes. The outputs are streamed as tuples `(stream_mode, streamed_output)`." 
+ ] + }, { "cell_type": "code", "execution_count": 12, From b2928a17f42880c6ee76f3bea88bb768fd21905c Mon Sep 17 00:00:00 2001 From: vbarda Date: Thu, 30 Jan 2025 17:24:57 -0500 Subject: [PATCH 10/14] update --- docs/docs/how-tos/streaming-events-from-within-tools.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb index 2ad9beb1c0..cfc222755b 100644 --- a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb +++ b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb @@ -53,7 +53,7 @@ "\n", " ```python\n", " # highlight-next-line\n", - " from langgraph.utils.config import get_stream_writer\n", + " from langgraph.config import get_stream_writer\n", " \n", " def tool(tool_arg: str):\n", " writer = get_stream_writer()\n", @@ -258,7 +258,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langgraph.utils.config import get_stream_writer\n", + "from langgraph.config import get_stream_writer\n", "\n", "\n", "@tool\n", From 91f74b24c19650f076dd15403facbcfddc28bdfe Mon Sep 17 00:00:00 2001 From: vbarda Date: Thu, 30 Jan 2025 17:27:50 -0500 Subject: [PATCH 11/14] nit --- docs/docs/how-tos/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 4727dccce8..34a1e27c54 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -84,7 +84,7 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( - [How to stream graph outputs](streaming.ipynb) - [How to stream LLM tokens](streaming-tokens.ipynb) - [How to stream LLM tokens from specific nodes](streaming-specific-nodes.ipynb) -- [How to stream events from within a tool](streaming-events-from-within-tools.ipynb) +- [How to stream data from within a tool](streaming-events-from-within-tools.ipynb) - [How to stream from 
subgraphs](streaming-subgraphs.ipynb) - [How to disable streaming for models that don't support it](disable-streaming.ipynb) From a6686374cdb40a688dd56599135f55ef9a05492a Mon Sep 17 00:00:00 2001 From: vbarda Date: Thu, 30 Jan 2025 17:32:09 -0500 Subject: [PATCH 12/14] update venv --- docs/docs/how-tos/streaming-subgraphs.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/docs/how-tos/streaming-subgraphs.ipynb b/docs/docs/how-tos/streaming-subgraphs.ipynb index 6978cbc900..d21fc4ccaf 100644 --- a/docs/docs/how-tos/streaming-subgraphs.ipynb +++ b/docs/docs/how-tos/streaming-subgraphs.ipynb @@ -184,9 +184,9 @@ ], "metadata": { "kernelspec": { - "display_name": "langgraph", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "langgraph" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -198,7 +198,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.12.3" } }, "nbformat": 4, From 93af8766ec58925215428f7e41cfec6ca6771ed8 Mon Sep 17 00:00:00 2001 From: vbarda Date: Fri, 31 Jan 2025 12:30:59 -0500 Subject: [PATCH 13/14] cr --- docs/docs/concepts/high_level.md | 2 +- docs/docs/concepts/streaming.md | 12 +- docs/docs/how-tos/index.md | 2 +- .../streaming-events-from-within-tools.ipynb | 191 +++++++++--------- docs/docs/how-tos/streaming-tokens.ipynb | 2 +- docs/docs/how-tos/streaming.ipynb | 20 +- docs/mkdocs.yml | 8 +- libs/langgraph/langgraph/pregel/__init__.py | 4 +- 8 files changed, 121 insertions(+), 120 deletions(-) diff --git a/docs/docs/concepts/high_level.md b/docs/docs/concepts/high_level.md index 3124e56dc6..fa66291b33 100644 --- a/docs/docs/concepts/high_level.md +++ b/docs/docs/concepts/high_level.md @@ -19,7 +19,7 @@ LangGraph has a [persistence layer](https://langchain-ai.github.io/langgraph/con ### Streaming -LangGraph also provides support for [streaming](../how-tos/index.md#streaming) workflow / agent state to the 
user (or developer) over the course of execution. LangGraph supports streaming of both events ([such as feedback from a tool call](../how-tos/stream-updates.ipynb)) and [tokens from LLM calls](../how-tos/streaming-tokens.ipynb) embedded in an application. +LangGraph also provides support for [streaming](../how-tos/index.md#streaming) workflow / agent state to the user (or developer) over the course of execution. LangGraph supports streaming of both events ([such as feedback from a tool call](../../how-tos/streaming#updates)) and [tokens from LLM calls](../../how-tos/streaming-tokens) embedded in an application. ### Debugging and Deployment diff --git a/docs/docs/concepts/streaming.md b/docs/docs/concepts/streaming.md index 4cff014971..b2c1b32376 100644 --- a/docs/docs/concepts/streaming.md +++ b/docs/docs/concepts/streaming.md @@ -7,11 +7,11 @@ LangGraph is built with first class support for streaming. There are several dif `.stream` and `.astream` are sync and async methods for streaming back outputs from a graph run. There are several different modes you can specify when calling these methods (e.g. `graph.stream(..., mode="...")): -- [`"values"`](../how-tos/stream-values.ipynb): This streams the full value of the state after each step of the graph. -- [`"updates"`](../how-tos/stream-updates.ipynb): This streams the updates to the state after each step of the graph. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are streamed separately. -- [`"custom"`](../how-tos/streaming-content.ipynb): This streams custom data from inside your graph nodes. -- [`"messages"`](../how-tos/streaming-tokens.ipynb): This streams LLM tokens and metadata for the graph node where LLM is invoked. -- `"debug"`: This streams as much information as possible throughout the execution of the graph. +- [`"values"`](../../how-tos/streaming#values): This streams the full value of the state after each step of the graph. 
+- [`"updates"`](../../how-tos/streaming#updates): This streams the updates to the state after each step of the graph. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are streamed separately. +- [`"custom"`](../../how-tos/streaming#custom): This streams custom data from inside your graph nodes. +- [`"messages"`](../../how-tos/streaming-tokens): This streams LLM tokens and metadata for the graph node where LLM is invoked. +- [`"debug"`](../../how-tos/streaming#debug): This streams as much information as possible throughout the execution of the graph. You can also specify multiple streaming modes at the same time by passing them as a list. When you do this, the streamed outputs will be tuples `(stream_mode, data)`. For example: @@ -145,7 +145,7 @@ guide for that [here](../how-tos/streaming-tokens.ipynb). !!! warning "ASYNC IN PYTHON<=3.10" - You may fail to see events being emitted from inside a node when using `.astream_events` in Python <= 3.10. If you're using a Langchain RunnableLambda, a RunnableGenerator, or Tool asynchronously inside your node, you will have to propagate callbacks to these objects manually. This is because LangChain cannot automatically propagate callbacks to child objects in this case. Please see examples [here](../how-tos/streaming-content.ipynb) and [here](../how-tos/streaming-events-from-within-tools.ipynb). + You may fail to see events being emitted from inside a node when using `.astream_events` in Python <= 3.10. If you're using a Langchain RunnableLambda, a RunnableGenerator, or Tool asynchronously inside your node, you will have to propagate callbacks to these objects manually. This is because LangChain cannot automatically propagate callbacks to child objects in this case. Please see examples [here](../../how-tos/streaming-tokens) and [here](../../how-tos/streaming-events-from-within-tools). 
## LangGraph Platform diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 34a1e27c54..4d16c91f59 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -81,7 +81,7 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( [Streaming](../concepts/streaming.md) is crucial for enhancing the responsiveness of applications built on LLMs. By displaying output progressively, even before a complete response is ready, streaming significantly improves user experience (UX), particularly when dealing with the latency of LLMs. -- [How to stream graph outputs](streaming.ipynb) +- [How to stream](streaming.ipynb) - [How to stream LLM tokens](streaming-tokens.ipynb) - [How to stream LLM tokens from specific nodes](streaming-specific-nodes.ipynb) - [How to stream data from within a tool](streaming-events-from-within-tools.ipynb) diff --git a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb index cfc222755b..fca8586af2 100644 --- a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb +++ b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb @@ -18,7 +18,29 @@ "\n", "If your graph calls tools that use LLMs or any other streaming APIs, you might want to surface partial results during the execution of the tool, especially if the tool takes a longer time to run.\n", "\n", - "1. To stream LLM tokens generated by a tool calling an LLM you can use [`stream_mode=\"messages\"`](../streaming#stream_modemessages):\n", + "1. 
To stream **arbitrary** data from inside a tool you can use [`stream_mode=\"custom\"`](../streaming#custom) and `get_stream_writer()`:\n", + "\n", + " ```python\n", + " # highlight-next-line\n", + " from langgraph.config import get_stream_writer\n", + " \n", + " def tool(tool_arg: str):\n", + " writer = get_stream_writer()\n", + " for chunk in custom_data_stream():\n", + " # stream any arbitrary data\n", + " # highlight-next-line\n", + " writer(chunk)\n", + " ...\n", + " \n", + " for chunk in graph.stream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"custom\"\n", + " ):\n", + " print(chunk)\n", + " ```\n", + "\n", + "2. To stream LLM tokens generated by a tool calling an LLM you can use [`stream_mode=\"messages\"`](../streaming#messages):\n", "\n", " ```python\n", " from langgraph.graph import StateGraph, MessagesState\n", @@ -49,31 +71,9 @@ " print(msg)\n", " ```\n", "\n", - "2. To stream **arbitrary** data from inside a tool you can use [`stream_mode=\"custom\"`](../streaming#stream_modemessages) and `get_stream_writer()`:\n", - "\n", - " ```python\n", - " # highlight-next-line\n", - " from langgraph.config import get_stream_writer\n", - " \n", - " def tool(tool_arg: str):\n", - " writer = get_stream_writer()\n", - " for chunk in custom_data_stream():\n", - " # stream any arbitrary data\n", - " # highlight-next-line\n", - " writer(chunk)\n", - " ...\n", - " \n", - " for chunk in graph.stream(\n", - " inputs,\n", - " # highlight-next-line\n", - " stream_mode=\"custom\"\n", - " ):\n", - " print(chunk)\n", - " ```\n", - "\n", "!!! note \"Using without LangChain\"\n", "\n", - " If you need to stream data from inside tools **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom). Check out the [example below](#example-without-langchain) to learn more.\n", + " If you need to stream data from inside tools **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#custom). 
Check out the [example below](#example-without-langchain) to learn more.\n", "\n", "!!! warning \"Async in Python < 3.11\"\n", " \n", @@ -138,10 +138,10 @@ }, { "cell_type": "markdown", - "id": "e3d02ebb-c2e1-4ef7-b187-810d55139317", + "id": "b4ddc3ff-5620-48de-82f0-03b9137410cf", "metadata": {}, "source": [ - "## Define the graph\n", + "## Streaming custom data\n", "\n", "We'll use a [prebuilt ReAct agent][langgraph.prebuilt.chat_agent_executor.create_react_agent] for this guide:" ] @@ -153,39 +153,26 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_core.callbacks import Callbacks\n", "from langchain_core.tools import tool\n", - "from langchain_core.messages import AIMessageChunk\n", - "from langchain_core.runnables import RunnableConfig\n", + "from langchain_openai import ChatOpenAI\n", "\n", "from langgraph.prebuilt import create_react_agent\n", - "from langchain_openai import ChatOpenAI\n", + "from langgraph.config import get_stream_writer\n", "\n", "\n", "@tool\n", - "async def get_items(\n", - " place: str,\n", - " # Manually accept config (needed for Python <= 3.10)\n", - " # highlight-next-line\n", - " config: RunnableConfig,\n", - ") -> str:\n", + "async def get_items(place: str) -> str:\n", " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", - " # Attention: when using async, you should be invoking the LLM using ainvoke!\n", - " # If you fail to do so, streaming will NOT work.\n", - " response = await llm.ainvoke(\n", - " [\n", - " {\n", - " \"role\": \"user\",\n", - " \"content\": (\n", - " f\"Can you tell me what kind of items i might find in the following place: '{place}'. \"\n", - " \"List at least 3 such items separating them by a comma. 
And include a brief description of each item.\"\n", - " ),\n", - " }\n", - " ],\n", + " # highlight-next-line\n", + " writer = get_stream_writer()\n", + "\n", + " # this can be replaced with any actual streaming logic that you might have\n", + " items = [\"books\", \"penciles\", \"pictures\"]\n", + " for chunk in items:\n", " # highlight-next-line\n", - " config,\n", - " )\n", - " return response.content\n", + " writer({\"custom_tool_data\": chunk})\n", + "\n", + " return \", \".join(items)\n", "\n", "\n", "llm = ChatOpenAI(model_name=\"gpt-4o-mini\")\n", @@ -196,51 +183,40 @@ }, { "cell_type": "markdown", - "id": "15cb55cc-b59d-4743-b6a3-13db75414d2c", + "id": "fa96d572-d15f-4f00-b629-cf25e0b4dece", "metadata": {}, "source": [ - "## Streaming LLM tokens" + "Let's now invoke our agent with an input that requires a tool call:" ] }, { "cell_type": "code", "execution_count": 4, - "id": "4c9cdad3-3e9a-444f-9d9d-eae20b8d3486", + "id": "8ae5051c-53b9-4c53-87b2-d7263cda3b7b", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Certainly|!| Here| are| three| items| you| might| find| in| a| bedroom|:\n", - "\n", - "|1|.| **|Bed|**|:| A| bed| is| a| piece| of| furniture| used| for| sleeping| or| resting|.| It| typically| consists| of| a| mattress| supported| by| a| frame| and| may| include| bedding| such| as| sheets|,| blankets|,| and| pillows| for| comfort|.\n", - "\n", - "|2|.| **|D|resser|**|:| A| dresser| is| a| storage| piece| of| furniture| with| drawers|,| used| for| organizing| clothing| and| personal| items|.| It| often| features| a| flat| surface| on| top|,| which| can| be| used| for| displaying| decorative| items| or| personal| care| products|.\n", - "\n", - "|3|.| **|Night|stand|**|:| A| night|stand| is| a| small| table| located| beside| the| bed|,| providing| a| convenient| place| to| keep| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| glasses|.| It| often| includes| one| or| more| drawers| for| additional| 
storage|.|" + "{'custom_tool_data': 'books'}\n", + "{'custom_tool_data': 'penciles'}\n", + "{'custom_tool_data': 'pictures'}\n" ] } ], "source": [ "inputs = {\n", " \"messages\": [ # noqa\n", - " {\"role\": \"user\", \"content\": \"what items are in the bedroom?\"}\n", + " {\"role\": \"user\", \"content\": \"what items are in the office?\"}\n", " ]\n", "}\n", - "async for msg, metadata in agent.astream(\n", + "async for chunk in agent.astream(\n", " inputs,\n", " # highlight-next-line\n", - " stream_mode=\"messages\",\n", + " stream_mode=\"custom\",\n", "):\n", - " if (\n", - " isinstance(msg, AIMessageChunk)\n", - " and msg.content\n", - " # Stream all messages from the tool node\n", - " # highlight-next-line\n", - " and metadata[\"langgraph_node\"] == \"tools\"\n", - " ):\n", - " print(msg.content, end=\"|\", flush=True)" + " print(chunk)" ] }, { @@ -248,7 +224,7 @@ "id": "6d8fa9fc-19af-47d6-9031-ee1720c51aa2", "metadata": {}, "source": [ - "## Streaming custom data" + "## Streaming LLM tokens" ] }, { @@ -258,22 +234,34 @@ "metadata": {}, "outputs": [], "source": [ - "from langgraph.config import get_stream_writer\n", + "from langchain_core.messages import AIMessageChunk\n", + "from langchain_core.runnables import RunnableConfig\n", "\n", "\n", "@tool\n", - "async def get_items(place: str) -> str:\n", - " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", + "async def get_items(\n", + " place: str,\n", + " # Manually accept config (needed for Python <= 3.10)\n", " # highlight-next-line\n", - " writer = get_stream_writer()\n", - "\n", - " # this can be replaced with any actual streaming logic that you might have\n", - " items = [\"books\", \"penciles\", \"pictures\"]\n", - " for chunk in items:\n", + " config: RunnableConfig,\n", + ") -> str:\n", + " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", + " # Attention: when using async, you should be invoking the LLM using ainvoke!\n", + " 
# If you fail to do so, streaming will NOT work.\n", + " response = await llm.ainvoke(\n", + " [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": (\n", + " f\"Can you tell me what kind of items i might find in the following place: '{place}'. \"\n", + " \"List at least 3 such items separating them by a comma. And include a brief description of each item.\"\n", + " ),\n", + " }\n", + " ],\n", " # highlight-next-line\n", - " writer({\"custom_tool_data\": chunk})\n", - "\n", - " return \", \".join(items)\n", + " config,\n", + " )\n", + " return response.content\n", "\n", "\n", "tools = [get_items]\n", @@ -284,31 +272,42 @@ { "cell_type": "code", "execution_count": 6, - "id": "8ae5051c-53b9-4c53-87b2-d7263cda3b7b", + "id": "4c9cdad3-3e9a-444f-9d9d-eae20b8d3486", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "{'custom_tool_data': 'books'}\n", - "{'custom_tool_data': 'penciles'}\n", - "{'custom_tool_data': 'pictures'}\n" + "Certainly|!| Here| are| three| items| you| might| find| in| a| bedroom|:\n", + "\n", + "|1|.| **|Bed|**|:| The| central| piece| of| furniture| in| a| bedroom|,| typically| consisting| of| a| mattress| supported| by| a| frame|.| It| is| designed| for| sleeping| and| can| vary| in| size| from| twin| to| king|.| Beds| often| have| bedding|,| including| sheets|,| pillows|,| and| comfort|ers|,| to| enhance| comfort|.\n", + "\n", + "|2|.| **|D|resser|**|:| A| piece| of| furniture| with| drawers| used| for| storing| clothing| and| personal| items|.| Dress|ers| often| have| a| flat| surface| on| top|,| which| can| be| used| for| decorative| items|,| a| mirror|,| or| personal| accessories|.| They| help| keep| the| bedroom| organized| and| clutter|-free|.\n", + "\n", + "|3|.| **|Night|stand|**|:| A| small| table| or| cabinet| placed| beside| the| bed|,| used| for| holding| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| personal| items|.| Night|stands| provide| convenience| for| easy| access| to| 
essentials| during| the| night|,| adding| functionality| and| style| to| the| bedroom| decor|.|" ] } ], "source": [ "inputs = {\n", " \"messages\": [ # noqa\n", - " {\"role\": \"user\", \"content\": \"what items are in the office?\"}\n", + " {\"role\": \"user\", \"content\": \"what items are in the bedroom?\"}\n", " ]\n", "}\n", - "async for chunk in agent.astream(\n", + "async for msg, metadata in agent.astream(\n", " inputs,\n", " # highlight-next-line\n", - " stream_mode=\"custom\",\n", + " stream_mode=\"messages\",\n", "):\n", - " print(chunk)" + " if (\n", + " isinstance(msg, AIMessageChunk)\n", + " and msg.content\n", + " # Stream all messages from the tool node\n", + " # highlight-next-line\n", + " and metadata[\"langgraph_node\"] == \"tools\"\n", + " ):\n", + " print(msg.content, end=\"|\", flush=True)" ] }, { @@ -443,11 +442,13 @@ "text": [ "Sure|!| Here| are| three| common| items| you| might| find| in| a| bedroom|:\n", "\n", - "|1|.| **|Bed|**|:| A| piece| of| furniture| typically| consisting| of| a| mattress| on| a| frame|,| providing| a| comfortable| place| for| sleep|.| Beds| can| vary| in| size| (|t|win|,| full|,| queen|,| king|)| and| style| (|platform|,| canopy|,| adjustable|).\n", + "|1|.| **|Bed|**|:| The| focal| point| of| the| bedroom|,| a| bed| typically| consists| of| a| mattress| resting| on| a| frame|,| and| it| may| include| pillows| and| bedding|.| It| provides| a| comfortable| place| for| sleeping| and| resting|.\n", + "\n", + "|2|.| **|D|resser|**|:| A| piece| of| furniture| with| multiple| drawers|,| a| dresser| is| used| for| storing| clothes|,| accessories|,| and| personal| items|.| It| often| has| a| flat| surface| that| may| be| used| to| display| decorative| items| or| a| mirror|.\n", "\n", - "|2|.| **|Night|stand|**|:| A| small| table| or| cabinet| usually| positioned| next| to| the| bed|,| used| for| holding| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| personal| belongings|.| Night|stands| often| have| drawers| or| 
shelves| for| added| storage|.\n", + "|3|.| **|Night|stand|**|:| Also| known| as| a| bedside| table|,| a| night|stand| is| placed| next| to| the| bed| and| typically| holds| items| like| lamps|,| books|,| alarm| clocks|,| and| personal| belongings| for| convenience| during| the| night|.\n", "\n", - "|3|.| **|D|resser|**|:| A| larger| piece| of| furniture| with| multiple| drawers| used| for| storing| clothes|,| accessories|,| and| other| personal| items|.| Dress|ers| often| have| a| flat| top| where| decorative| items|,| mirrors|,| or| personal| effects| can| be| displayed|.|" + "|These| items| contribute| to| the| functionality| and| comfort| of| the| bedroom| environment|.|" ] } ], diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index 58775aa22b..57812a4600 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -42,7 +42,7 @@ "\n", "!!! note \"Using without LangChain\"\n", "\n", - " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", + " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#custom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", "\n", "!!! 
warning \"Async in Python < 3.11\"\n", " \n", diff --git a/docs/docs/how-tos/streaming.ipynb b/docs/docs/how-tos/streaming.ipynb index 671742c407..86fbb79eb3 100644 --- a/docs/docs/how-tos/streaming.ipynb +++ b/docs/docs/how-tos/streaming.ipynb @@ -5,7 +5,7 @@ "id": "76c4b04f-0c03-4321-9d40-38d12c59d088", "metadata": {}, "source": [ - "# How to stream graph outputs" + "# How to stream" ] }, { @@ -25,10 +25,10 @@ "LangGraph is built with first class support for streaming. There are several different ways to stream back outputs from a graph run:\n", "\n", "- `\"values\"`: Emit all values in the state after each step.\n", - "- `\"updates\"`: Emit only the node or task names and updates returned by the nodes or tasks after each step.\n", + "- `\"updates\"`: Emit only the node names and updates returned by the nodes after each step.\n", " If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately.\n", - "- `\"custom\"`: Emit custom data from inside nodes or tasks using `StreamWriter`.\n", - "- [`\"messages\"`](../streaming-tokens): Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks.\n", + "- `\"custom\"`: Emit custom data from inside nodes using `StreamWriter`.\n", + "- [`\"messages\"`](../streaming-tokens): Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes.\n", "- `\"debug\"`: Emit debug events with as much information as possible for each step.\n", "\n", "You can stream outputs from the graph by using `graph.stream(..., stream_mode=)` method, e.g.:\n", @@ -178,7 +178,7 @@ "id": "f9b90850-85bf-4391-b6b7-22ad45edaa3b", "metadata": {}, "source": [ - "## stream_mode=\"values\"" + "## Stream all values in the state (stream_mode=\"values\") {#values}" ] }, { @@ -219,7 +219,7 @@ "id": "adcb1bdb-f9fa-4d42-87ce-8e25d4290883", "metadata": {}, "source": [ - "## stream_mode=\"updates\"" + "## Stream state updates from the nodes 
(stream_mode=\"updates\") {#updates}" ] }, { @@ -259,7 +259,7 @@ "id": "b9ed9c68-b7c5-4420-945d-84fa33fcf88f", "metadata": {}, "source": [ - "## stream_mode=\"debug\"" + "## Stream debug events (stream_mode=\"debug\") {#debug}" ] }, { @@ -301,7 +301,7 @@ "id": "6791da60-0513-43e6-b445-788dd81683bb", "metadata": {}, "source": [ - "## [stream_mode=\"messages\"](../streaming-tokens)" + "## Stream LLM tokens ([stream_mode=\"messages\"](../streaming-tokens)) {#messages}" ] }, { @@ -409,7 +409,7 @@ "id": "0d1ebeda-4498-40e0-a30a-0844cb491425", "metadata": {}, "source": [ - "## stream_mode=\"custom\"" + "## Stream custom data (stream_mode=\"custom\") {#custom}" ] }, { @@ -475,7 +475,7 @@ "id": "28e67f4d-fcab-46a8-93e2-b7bee30336c1", "metadata": {}, "source": [ - "## Multiple streaming modes" + "## Configure multiple streaming modes {#multiple}" ] }, { diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 2c1f580a7a..99237ea2b8 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -59,10 +59,10 @@ plugins: - redirects: redirect_maps: # lib redirects - 'how-tos/stream-values.md': 'how-tos/streaming.md#stream_modevalues' - 'how-tos/stream-updates.md': 'how-tos/streaming.md#stream_modeupdates' - 'how-tos/streaming-content.md': 'how-tos/streaming.md#stream_modecustom' - 'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple-streaming-modes' + 'how-tos/stream-values.md': 'how-tos/streaming.md#values' + 'how-tos/stream-updates.md': 'how-tos/streaming.md#updates' + 'how-tos/streaming-content.md': 'how-tos/streaming.md#custom' + 'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple' 'how-tos/streaming-tokens-without-langchain.md': 'how-tos/streaming-tokens.md#example-without-langchain' 'how-tos/streaming-from-final-node.md': 'how-tos/streaming-specific-nodes.md' 'how-tos/streaming-events-from-within-tools-without-langchain.md': 'how-tos/streaming-events-from-within-tools.md#example-without-langchain' diff --git 
a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index a30d2fe58e..908ae56374 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -1496,7 +1496,7 @@ def stream( When used with functional API, values are emitted once at the end of the workflow. - `"updates"`: Emit only the node or task names and updates returned by the nodes or tasks after each step. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately. - - `"custom"`: Emit custom data using from inside nodes or tasks using `StreamWriter`. + - `"custom"`: Emit custom data from inside nodes or tasks using `StreamWriter`. - `"messages"`: Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks. - `"debug"`: Emit debug events with as much information as possible for each step. output_keys: The keys to stream, defaults to all non-context channels. @@ -1772,7 +1772,7 @@ async def astream( When used with functional API, values are emitted once at the end of the workflow. - `"updates"`: Emit only the node or task names and updates returned by the nodes or tasks after each step. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately. - - `"custom"`: Emit custom data using from inside nodes or tasks using `StreamWriter`. + - `"custom"`: Emit custom data from inside nodes or tasks using `StreamWriter`. - `"messages"`: Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks. - `"debug"`: Emit debug events with as much information as possible for each step. output_keys: The keys to stream, defaults to all non-context channels. 
From 3a9e88acbc95fcc0f09ad8d95407d2d913c6dd23 Mon Sep 17 00:00:00 2001 From: vbarda Date: Fri, 31 Jan 2025 12:30:59 -0500 Subject: [PATCH 14/14] cr --- docs/docs/concepts/high_level.md | 2 +- docs/docs/concepts/streaming.md | 12 +- docs/docs/how-tos/index.md | 2 +- .../streaming-events-from-within-tools.ipynb | 191 +++++++++--------- docs/docs/how-tos/streaming-tokens.ipynb | 2 +- docs/docs/how-tos/streaming.ipynb | 20 +- docs/mkdocs.yml | 8 +- libs/langgraph/langgraph/pregel/__init__.py | 4 +- 8 files changed, 121 insertions(+), 120 deletions(-) diff --git a/docs/docs/concepts/high_level.md b/docs/docs/concepts/high_level.md index 3124e56dc6..76c69a92af 100644 --- a/docs/docs/concepts/high_level.md +++ b/docs/docs/concepts/high_level.md @@ -19,7 +19,7 @@ LangGraph has a [persistence layer](https://langchain-ai.github.io/langgraph/con ### Streaming -LangGraph also provides support for [streaming](../how-tos/index.md#streaming) workflow / agent state to the user (or developer) over the course of execution. LangGraph supports streaming of both events ([such as feedback from a tool call](../how-tos/stream-updates.ipynb)) and [tokens from LLM calls](../how-tos/streaming-tokens.ipynb) embedded in an application. +LangGraph also provides support for [streaming](../how-tos/index.md#streaming) workflow / agent state to the user (or developer) over the course of execution. LangGraph supports streaming of both events ([such as feedback from a tool call](../how-tos/streaming.md#updates)) and [tokens from LLM calls](../how-tos/streaming-tokens.md) embedded in an application. ### Debugging and Deployment diff --git a/docs/docs/concepts/streaming.md b/docs/docs/concepts/streaming.md index 4cff014971..a97328cac2 100644 --- a/docs/docs/concepts/streaming.md +++ b/docs/docs/concepts/streaming.md @@ -7,11 +7,11 @@ LangGraph is built with first class support for streaming. 
There are several dif `.stream` and `.astream` are sync and async methods for streaming back outputs from a graph run. There are several different modes you can specify when calling these methods (e.g. `graph.stream(..., mode="...")): -- [`"values"`](../how-tos/stream-values.ipynb): This streams the full value of the state after each step of the graph. -- [`"updates"`](../how-tos/stream-updates.ipynb): This streams the updates to the state after each step of the graph. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are streamed separately. -- [`"custom"`](../how-tos/streaming-content.ipynb): This streams custom data from inside your graph nodes. -- [`"messages"`](../how-tos/streaming-tokens.ipynb): This streams LLM tokens and metadata for the graph node where LLM is invoked. -- `"debug"`: This streams as much information as possible throughout the execution of the graph. +- [`"values"`](../how-tos/streaming.md#values): This streams the full value of the state after each step of the graph. +- [`"updates"`](../how-tos/streaming.md#updates): This streams the updates to the state after each step of the graph. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are streamed separately. +- [`"custom"`](../how-tos/streaming.md#custom): This streams custom data from inside your graph nodes. +- [`"messages"`](../how-tos/streaming-tokens.md): This streams LLM tokens and metadata for the graph node where LLM is invoked. +- [`"debug"`](../how-tos/streaming.md#debug): This streams as much information as possible throughout the execution of the graph. You can also specify multiple streaming modes at the same time by passing them as a list. When you do this, the streamed outputs will be tuples `(stream_mode, data)`. For example: @@ -145,7 +145,7 @@ guide for that [here](../how-tos/streaming-tokens.ipynb). !!! 
warning "ASYNC IN PYTHON<=3.10" - You may fail to see events being emitted from inside a node when using `.astream_events` in Python <= 3.10. If you're using a Langchain RunnableLambda, a RunnableGenerator, or Tool asynchronously inside your node, you will have to propagate callbacks to these objects manually. This is because LangChain cannot automatically propagate callbacks to child objects in this case. Please see examples [here](../how-tos/streaming-content.ipynb) and [here](../how-tos/streaming-events-from-within-tools.ipynb). + You may fail to see events being emitted from inside a node when using `.astream_events` in Python <= 3.10. If you're using a Langchain RunnableLambda, a RunnableGenerator, or Tool asynchronously inside your node, you will have to propagate callbacks to these objects manually. This is because LangChain cannot automatically propagate callbacks to child objects in this case. Please see examples [here](../how-tos/streaming-tokens.md) and [here](../how-tos/streaming-events-from-within-tools.md). ## LangGraph Platform diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index 34a1e27c54..4d16c91f59 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -81,7 +81,7 @@ See the below guides for how-to implement human-in-the-loop workflows with the ( [Streaming](../concepts/streaming.md) is crucial for enhancing the responsiveness of applications built on LLMs. By displaying output progressively, even before a complete response is ready, streaming significantly improves user experience (UX), particularly when dealing with the latency of LLMs. 
-- [How to stream graph outputs](streaming.ipynb) +- [How to stream](streaming.ipynb) - [How to stream LLM tokens](streaming-tokens.ipynb) - [How to stream LLM tokens from specific nodes](streaming-specific-nodes.ipynb) - [How to stream data from within a tool](streaming-events-from-within-tools.ipynb) diff --git a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb index cfc222755b..fca8586af2 100644 --- a/docs/docs/how-tos/streaming-events-from-within-tools.ipynb +++ b/docs/docs/how-tos/streaming-events-from-within-tools.ipynb @@ -18,7 +18,29 @@ "\n", "If your graph calls tools that use LLMs or any other streaming APIs, you might want to surface partial results during the execution of the tool, especially if the tool takes a longer time to run.\n", "\n", - "1. To stream LLM tokens generated by a tool calling an LLM you can use [`stream_mode=\"messages\"`](../streaming#stream_modemessages):\n", + "1. To stream **arbitrary** data from inside a tool you can use [`stream_mode=\"custom\"`](../streaming#custom) and `get_stream_writer()`:\n", + "\n", + " ```python\n", + " # highlight-next-line\n", + " from langgraph.config import get_stream_writer\n", + " \n", + " def tool(tool_arg: str):\n", + " writer = get_stream_writer()\n", + " for chunk in custom_data_stream():\n", + " # stream any arbitrary data\n", + " # highlight-next-line\n", + " writer(chunk)\n", + " ...\n", + " \n", + " for chunk in graph.stream(\n", + " inputs,\n", + " # highlight-next-line\n", + " stream_mode=\"custom\"\n", + " ):\n", + " print(chunk)\n", + " ```\n", + "\n", + "2. To stream LLM tokens generated by a tool calling an LLM you can use [`stream_mode=\"messages\"`](../streaming#messages):\n", "\n", " ```python\n", " from langgraph.graph import StateGraph, MessagesState\n", @@ -49,31 +71,9 @@ " print(msg)\n", " ```\n", "\n", - "2. 
To stream **arbitrary** data from inside a tool you can use [`stream_mode=\"custom\"`](../streaming#stream_modemessages) and `get_stream_writer()`:\n", - "\n", - " ```python\n", - " # highlight-next-line\n", - " from langgraph.config import get_stream_writer\n", - " \n", - " def tool(tool_arg: str):\n", - " writer = get_stream_writer()\n", - " for chunk in custom_data_stream():\n", - " # stream any arbitrary data\n", - " # highlight-next-line\n", - " writer(chunk)\n", - " ...\n", - " \n", - " for chunk in graph.stream(\n", - " inputs,\n", - " # highlight-next-line\n", - " stream_mode=\"custom\"\n", - " ):\n", - " print(chunk)\n", - " ```\n", - "\n", "!!! note \"Using without LangChain\"\n", "\n", - " If you need to stream data from inside tools **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom). Check out the [example below](#example-without-langchain) to learn more.\n", + " If you need to stream data from inside tools **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#custom). Check out the [example below](#example-without-langchain) to learn more.\n", "\n", "!!! 
warning \"Async in Python < 3.11\"\n", " \n", @@ -138,10 +138,10 @@ }, { "cell_type": "markdown", - "id": "e3d02ebb-c2e1-4ef7-b187-810d55139317", + "id": "b4ddc3ff-5620-48de-82f0-03b9137410cf", "metadata": {}, "source": [ - "## Define the graph\n", + "## Streaming custom data\n", "\n", "We'll use a [prebuilt ReAct agent][langgraph.prebuilt.chat_agent_executor.create_react_agent] for this guide:" ] @@ -153,39 +153,26 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_core.callbacks import Callbacks\n", "from langchain_core.tools import tool\n", - "from langchain_core.messages import AIMessageChunk\n", - "from langchain_core.runnables import RunnableConfig\n", + "from langchain_openai import ChatOpenAI\n", "\n", "from langgraph.prebuilt import create_react_agent\n", - "from langchain_openai import ChatOpenAI\n", + "from langgraph.config import get_stream_writer\n", "\n", "\n", "@tool\n", - "async def get_items(\n", - " place: str,\n", - " # Manually accept config (needed for Python <= 3.10)\n", - " # highlight-next-line\n", - " config: RunnableConfig,\n", - ") -> str:\n", + "async def get_items(place: str) -> str:\n", " \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n", - " # Attention: when using async, you should be invoking the LLM using ainvoke!\n", - " # If you fail to do so, streaming will NOT work.\n", - " response = await llm.ainvoke(\n", - " [\n", - " {\n", - " \"role\": \"user\",\n", - " \"content\": (\n", - " f\"Can you tell me what kind of items i might find in the following place: '{place}'. \"\n", - " \"List at least 3 such items separating them by a comma. 
And include a brief description of each item.\"\n",
-      "                ),\n",
-      "            }\n",
-      "        ],\n",
+      "    # highlight-next-line\n",
+      "    writer = get_stream_writer()\n",
+      "\n",
+      "    # this can be replaced with any actual streaming logic that you might have\n",
+      "    items = [\"books\", \"pencils\", \"pictures\"]\n",
+      "    for chunk in items:\n",
       "        # highlight-next-line\n",
-      "        config,\n",
-      "    )\n",
-      "    return response.content\n",
+      "        writer({\"custom_tool_data\": chunk})\n",
+      "\n",
+      "    return \", \".join(items)\n",
       "\n",
       "\n",
       "llm = ChatOpenAI(model_name=\"gpt-4o-mini\")\n",
@@ -196,51 +183,40 @@
  },
  {
   "cell_type": "markdown",
-   "id": "15cb55cc-b59d-4743-b6a3-13db75414d2c",
+   "id": "fa96d572-d15f-4f00-b629-cf25e0b4dece",
   "metadata": {},
   "source": [
-    "## Streaming LLM tokens"
+    "Let's now invoke our agent with an input that requires a tool call:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
-   "id": "4c9cdad3-3e9a-444f-9d9d-eae20b8d3486",
+   "id": "8ae5051c-53b9-4c53-87b2-d7263cda3b7b",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "Certainly|!| Here| are| three| items| you| might| find| in| a| bedroom|:\n",
-      "\n",
-      "|1|.| **|Bed|**|:| A| bed| is| a| piece| of| furniture| used| for| sleeping| or| resting|.| It| typically| consists| of| a| mattress| supported| by| a| frame| and| may| include| bedding| such| as| sheets|,| blankets|,| and| pillows| for| comfort|.\n",
-      "\n",
-      "|2|.| **|D|resser|**|:| A| dresser| is| a| storage| piece| of| furniture| with| drawers|,| used| for| organizing| clothing| and| personal| items|.| It| often| features| a| flat| surface| on| top|,| which| can| be| used| for| displaying| decorative| items| or| personal| care| products|.\n",
-      "\n",
-      "|3|.| **|Night|stand|**|:| A| night|stand| is| a| small| table| located| beside| the| bed|,| providing| a| convenient| place| to| keep| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| glasses|.| It| often| includes| one| or| more| drawers| for| additional| storage|.|"
+      "{'custom_tool_data': 'books'}\n",
+      "{'custom_tool_data': 'pencils'}\n",
+      "{'custom_tool_data': 'pictures'}\n"
     ]
    }
   ],
   "source": [
    "inputs = {\n",
    "    \"messages\": [  # noqa\n",
-    "        {\"role\": \"user\", \"content\": \"what items are in the bedroom?\"}\n",
+    "        {\"role\": \"user\", \"content\": \"what items are in the office?\"}\n",
    "    ]\n",
    "}\n",
-    "async for msg, metadata in agent.astream(\n",
+    "async for chunk in agent.astream(\n",
    "    inputs,\n",
    "    # highlight-next-line\n",
-    "    stream_mode=\"messages\",\n",
+    "    stream_mode=\"custom\",\n",
    "):\n",
-    "    if (\n",
-    "        isinstance(msg, AIMessageChunk)\n",
-    "        and msg.content\n",
-    "        # Stream all messages from the tool node\n",
-    "        # highlight-next-line\n",
-    "        and metadata[\"langgraph_node\"] == \"tools\"\n",
-    "    ):\n",
-    "        print(msg.content, end=\"|\", flush=True)"
+    "    print(chunk)"
   ]
  },
  {
@@ -248,7 +224,7 @@
   "id": "6d8fa9fc-19af-47d6-9031-ee1720c51aa2",
   "metadata": {},
   "source": [
-    "## Streaming custom data"
+    "## Streaming LLM tokens"
   ]
  },
  {
@@ -258,22 +234,34 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "from langgraph.config import get_stream_writer\n",
+    "from langchain_core.messages import AIMessageChunk\n",
+    "from langchain_core.runnables import RunnableConfig\n",
    "\n",
    "\n",
    "@tool\n",
-    "async def get_items(place: str) -> str:\n",
-    "    \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n",
+    "async def get_items(\n",
+    "    place: str,\n",
+    "    # Manually accept config (needed for Python <= 3.10)\n",
    "    # highlight-next-line\n",
-    "    writer = get_stream_writer()\n",
-    "\n",
-    "    # this can be replaced with any actual streaming logic that you might have\n",
-    "    items = [\"books\", \"penciles\", \"pictures\"]\n",
-    "    for chunk in items:\n",
+    "    config: RunnableConfig,\n",
+    ") -> str:\n",
+    "    \"\"\"Use this tool to list items one might find in a place you're asked about.\"\"\"\n",
+    "    # Attention: when using async, you should be invoking the LLM using ainvoke!\n",
+    "    
# If you fail to do so, streaming will NOT work.\n", + " response = await llm.ainvoke(\n", + " [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": (\n", + " f\"Can you tell me what kind of items i might find in the following place: '{place}'. \"\n", + " \"List at least 3 such items separating them by a comma. And include a brief description of each item.\"\n", + " ),\n", + " }\n", + " ],\n", " # highlight-next-line\n", - " writer({\"custom_tool_data\": chunk})\n", - "\n", - " return \", \".join(items)\n", + " config,\n", + " )\n", + " return response.content\n", "\n", "\n", "tools = [get_items]\n", @@ -284,31 +272,42 @@ { "cell_type": "code", "execution_count": 6, - "id": "8ae5051c-53b9-4c53-87b2-d7263cda3b7b", + "id": "4c9cdad3-3e9a-444f-9d9d-eae20b8d3486", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "{'custom_tool_data': 'books'}\n", - "{'custom_tool_data': 'penciles'}\n", - "{'custom_tool_data': 'pictures'}\n" + "Certainly|!| Here| are| three| items| you| might| find| in| a| bedroom|:\n", + "\n", + "|1|.| **|Bed|**|:| The| central| piece| of| furniture| in| a| bedroom|,| typically| consisting| of| a| mattress| supported| by| a| frame|.| It| is| designed| for| sleeping| and| can| vary| in| size| from| twin| to| king|.| Beds| often| have| bedding|,| including| sheets|,| pillows|,| and| comfort|ers|,| to| enhance| comfort|.\n", + "\n", + "|2|.| **|D|resser|**|:| A| piece| of| furniture| with| drawers| used| for| storing| clothing| and| personal| items|.| Dress|ers| often| have| a| flat| surface| on| top|,| which| can| be| used| for| decorative| items|,| a| mirror|,| or| personal| accessories|.| They| help| keep| the| bedroom| organized| and| clutter|-free|.\n", + "\n", + "|3|.| **|Night|stand|**|:| A| small| table| or| cabinet| placed| beside| the| bed|,| used| for| holding| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| personal| items|.| Night|stands| provide| convenience| for| easy| access| to| 
essentials| during| the| night|,| adding| functionality| and| style| to| the| bedroom| decor|.|" ] } ], "source": [ "inputs = {\n", " \"messages\": [ # noqa\n", - " {\"role\": \"user\", \"content\": \"what items are in the office?\"}\n", + " {\"role\": \"user\", \"content\": \"what items are in the bedroom?\"}\n", " ]\n", "}\n", - "async for chunk in agent.astream(\n", + "async for msg, metadata in agent.astream(\n", " inputs,\n", " # highlight-next-line\n", - " stream_mode=\"custom\",\n", + " stream_mode=\"messages\",\n", "):\n", - " print(chunk)" + " if (\n", + " isinstance(msg, AIMessageChunk)\n", + " and msg.content\n", + " # Stream all messages from the tool node\n", + " # highlight-next-line\n", + " and metadata[\"langgraph_node\"] == \"tools\"\n", + " ):\n", + " print(msg.content, end=\"|\", flush=True)" ] }, { @@ -443,11 +442,13 @@ "text": [ "Sure|!| Here| are| three| common| items| you| might| find| in| a| bedroom|:\n", "\n", - "|1|.| **|Bed|**|:| A| piece| of| furniture| typically| consisting| of| a| mattress| on| a| frame|,| providing| a| comfortable| place| for| sleep|.| Beds| can| vary| in| size| (|t|win|,| full|,| queen|,| king|)| and| style| (|platform|,| canopy|,| adjustable|).\n", + "|1|.| **|Bed|**|:| The| focal| point| of| the| bedroom|,| a| bed| typically| consists| of| a| mattress| resting| on| a| frame|,| and| it| may| include| pillows| and| bedding|.| It| provides| a| comfortable| place| for| sleeping| and| resting|.\n", + "\n", + "|2|.| **|D|resser|**|:| A| piece| of| furniture| with| multiple| drawers|,| a| dresser| is| used| for| storing| clothes|,| accessories|,| and| personal| items|.| It| often| has| a| flat| surface| that| may| be| used| to| display| decorative| items| or| a| mirror|.\n", "\n", - "|2|.| **|Night|stand|**|:| A| small| table| or| cabinet| usually| positioned| next| to| the| bed|,| used| for| holding| items| such| as| a| lamp|,| alarm| clock|,| books|,| or| personal| belongings|.| Night|stands| often| have| drawers| or| 
shelves| for| added| storage|.\n", + "|3|.| **|Night|stand|**|:| Also| known| as| a| bedside| table|,| a| night|stand| is| placed| next| to| the| bed| and| typically| holds| items| like| lamps|,| books|,| alarm| clocks|,| and| personal| belongings| for| convenience| during| the| night|.\n", "\n", - "|3|.| **|D|resser|**|:| A| larger| piece| of| furniture| with| multiple| drawers| used| for| storing| clothes|,| accessories|,| and| other| personal| items|.| Dress|ers| often| have| a| flat| top| where| decorative| items|,| mirrors|,| or| personal| effects| can| be| displayed|.|" + "|These| items| contribute| to| the| functionality| and| comfort| of| the| bedroom| environment|.|" ] } ], diff --git a/docs/docs/how-tos/streaming-tokens.ipynb b/docs/docs/how-tos/streaming-tokens.ipynb index 58775aa22b..57812a4600 100644 --- a/docs/docs/how-tos/streaming-tokens.ipynb +++ b/docs/docs/how-tos/streaming-tokens.ipynb @@ -42,7 +42,7 @@ "\n", "!!! note \"Using without LangChain\"\n", "\n", - " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#stream_modecustom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", + " If you need to stream LLM tokens **without using LangChain**, you can use [`stream_mode=\"custom\"`](../streaming/#custom) to stream the outputs from LLM provider clients directly. Check out the [example below](#example-without-langchain) to learn more.\n", "\n", "!!! 
warning \"Async in Python < 3.11\"\n", " \n", diff --git a/docs/docs/how-tos/streaming.ipynb b/docs/docs/how-tos/streaming.ipynb index 671742c407..86fbb79eb3 100644 --- a/docs/docs/how-tos/streaming.ipynb +++ b/docs/docs/how-tos/streaming.ipynb @@ -5,7 +5,7 @@ "id": "76c4b04f-0c03-4321-9d40-38d12c59d088", "metadata": {}, "source": [ - "# How to stream graph outputs" + "# How to stream" ] }, { @@ -25,10 +25,10 @@ "LangGraph is built with first class support for streaming. There are several different ways to stream back outputs from a graph run:\n", "\n", "- `\"values\"`: Emit all values in the state after each step.\n", - "- `\"updates\"`: Emit only the node or task names and updates returned by the nodes or tasks after each step.\n", + "- `\"updates\"`: Emit only the node names and updates returned by the nodes after each step.\n", " If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately.\n", - "- `\"custom\"`: Emit custom data from inside nodes or tasks using `StreamWriter`.\n", - "- [`\"messages\"`](../streaming-tokens): Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks.\n", + "- `\"custom\"`: Emit custom data from inside nodes using `StreamWriter`.\n", + "- [`\"messages\"`](../streaming-tokens): Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes.\n", "- `\"debug\"`: Emit debug events with as much information as possible for each step.\n", "\n", "You can stream outputs from the graph by using `graph.stream(..., stream_mode=)` method, e.g.:\n", @@ -178,7 +178,7 @@ "id": "f9b90850-85bf-4391-b6b7-22ad45edaa3b", "metadata": {}, "source": [ - "## stream_mode=\"values\"" + "## Stream all values in the state (stream_mode=\"values\") {#values}" ] }, { @@ -219,7 +219,7 @@ "id": "adcb1bdb-f9fa-4d42-87ce-8e25d4290883", "metadata": {}, "source": [ - "## stream_mode=\"updates\"" + "## Stream state updates from the nodes 
(stream_mode=\"updates\") {#updates}"
   ]
  },
  {
@@ -259,7 +259,7 @@
   "id": "b9ed9c68-b7c5-4420-945d-84fa33fcf88f",
   "metadata": {},
   "source": [
-    "## stream_mode=\"debug\""
+    "## Stream debug events (stream_mode=\"debug\") {#debug}"
   ]
  },
  {
@@ -301,7 +301,7 @@
   "id": "6791da60-0513-43e6-b445-788dd81683bb",
   "metadata": {},
   "source": [
-    "## [stream_mode=\"messages\"](../streaming-tokens)"
+    "## Stream LLM tokens ([stream_mode=\"messages\"](../streaming-tokens)) {#messages}"
   ]
  },
  {
@@ -409,7 +409,7 @@
   "id": "0d1ebeda-4498-40e0-a30a-0844cb491425",
   "metadata": {},
   "source": [
-    "## stream_mode=\"custom\""
+    "## Stream custom data (stream_mode=\"custom\") {#custom}"
   ]
  },
  {
@@ -475,7 +475,7 @@
   "id": "28e67f4d-fcab-46a8-93e2-b7bee30336c1",
   "metadata": {},
   "source": [
-    "## Multiple streaming modes"
+    "## Configure multiple streaming modes {#multiple}"
   ]
  },
  {
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 2c1f580a7a..99237ea2b8 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -59,10 +59,10 @@ plugins:
   - redirects:
       redirect_maps:
         # lib redirects
-        'how-tos/stream-values.md': 'how-tos/streaming.md#stream_modevalues'
-        'how-tos/stream-updates.md': 'how-tos/streaming.md#stream_modeupdates'
-        'how-tos/streaming-content.md': 'how-tos/streaming.md#stream_modecustom'
-        'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple-streaming-modes'
+        'how-tos/stream-values.md': 'how-tos/streaming.md#values'
+        'how-tos/stream-updates.md': 'how-tos/streaming.md#updates'
+        'how-tos/streaming-content.md': 'how-tos/streaming.md#custom'
+        'how-tos/stream-multiple.md': 'how-tos/streaming.md#multiple'
         'how-tos/streaming-tokens-without-langchain.md': 'how-tos/streaming-tokens.md#example-without-langchain'
         'how-tos/streaming-from-final-node.md': 'how-tos/streaming-specific-nodes.md'
         'how-tos/streaming-events-from-within-tools-without-langchain.md': 'how-tos/streaming-events-from-within-tools.md#example-without-langchain'
diff --git 
a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index a30d2fe58e..908ae56374 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -1496,7 +1496,7 @@ def stream( When used with functional API, values are emitted once at the end of the workflow. - `"updates"`: Emit only the node or task names and updates returned by the nodes or tasks after each step. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately. - - `"custom"`: Emit custom data using from inside nodes or tasks using `StreamWriter`. + - `"custom"`: Emit custom data from inside nodes or tasks using `StreamWriter`. - `"messages"`: Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks. - `"debug"`: Emit debug events with as much information as possible for each step. output_keys: The keys to stream, defaults to all non-context channels. @@ -1772,7 +1772,7 @@ async def astream( When used with functional API, values are emitted once at the end of the workflow. - `"updates"`: Emit only the node or task names and updates returned by the nodes or tasks after each step. If multiple updates are made in the same step (e.g. multiple nodes are run) then those updates are emitted separately. - - `"custom"`: Emit custom data using from inside nodes or tasks using `StreamWriter`. + - `"custom"`: Emit custom data from inside nodes or tasks using `StreamWriter`. - `"messages"`: Emit LLM messages token-by-token together with metadata for any LLM invocations inside nodes or tasks. - `"debug"`: Emit debug events with as much information as possible for each step. output_keys: The keys to stream, defaults to all non-context channels.