From 5e9ba5f02c00149b58496fa2915ccffc0e664cfd Mon Sep 17 00:00:00 2001 From: Andrew Maguire Date: Wed, 25 Oct 2023 17:46:14 +0100 Subject: [PATCH 01/11] add blog of video about project being completed (#3719) --- .../index.mdx | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 docs/blog/2023-10-25-open-assistant-is-completed/index.mdx diff --git a/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx b/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx new file mode 100644 index 0000000000..175c04e784 --- /dev/null +++ b/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx @@ -0,0 +1,17 @@ +--- +title: OpenAssistant is Completed! +description: OpenAssistant is Completed! +authors: [yk] +tags: [open-assistant, youtube] +image: https://img.youtube.com/vi/gqtmUHhaplo/0.jpg +--- + +import ReactPlayer from "react-player"; + + + + From 7558fa8ed3f5a63df9cdf845cbdd0c86c995b51b Mon Sep 17 00:00:00 2001 From: Andrew Maguire Date: Tue, 7 Nov 2023 22:40:44 +0000 Subject: [PATCH 02/11] add note to readme about project being completed (#3724) --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index b0bb4562d6..054fb65ef2 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,10 @@ +
+

:memo: NOTE: OpenAssistant is completed, and the project is now finished. Thank you to everyone who contributed! Check out our blog post for more information.

+
+
![GitHub Repo stars](https://img.shields.io/github/stars/LAION-AI/Open-Assistant?style=social) From de1f5c386b41571597e20f07f10591d2fa36e7b7 Mon Sep 17 00:00:00 2001 From: someone13574 <81528246+someone13574@users.noreply.github.com> Date: Sun, 12 Nov 2023 11:40:15 -0500 Subject: [PATCH 03/11] Update docs for current project status. (#3730) This PR updates the FAQ, which is currently very out of date, to the current status of the project. It also adds a notice to the introduction page in the docs that the project has concluded. Feel free to make any changes if it isn't worded how you would like. --- docs/docs/faq.md | 186 +++++++-------------------------------- docs/docs/intro.md | 6 ++ 2 files changed, 35 insertions(+), 157 deletions(-) diff --git a/docs/docs/faq.md b/docs/docs/faq.md index 91c277cb66..8566b9dfe2 100644 --- a/docs/docs/faq.md +++ b/docs/docs/faq.md @@ -15,12 +15,12 @@ In this page, there are some of the most frequently asked questions. -We have released candidate supervised finetuning (SFT) models using both Pythia -and LLaMa, as well as candidate reward models for reinforcement learning from -human feedback training using Pythia, which you can try, and are beginning the -process of applying (RLHF). We have also released the first version of the -OpenAssistant Conversations dataset -[here](https://huggingface.co/datasets/OpenAssistant/oasst1). +This project has concluded. We have released supervised finetuning (SFT) models +using Llama 2, LLaMa, Falcon, Pythia, and StableLM as well as reinforcement +learning from human feedback trained models and reward models, all of which are +available [here](https://huggingface.co/OpenAssistant). In addition to our +models, we have released three datasets from OpenAssistant conversations, and a +[research paper](https://arxiv.org/abs/2304.07327). @@ -31,9 +31,8 @@ OpenAssistant Conversations dataset -You can play with our best candidate model -[here](https://open-assistant.io/chat) and provide thumbs up/down responses to -help us improve the model in future! +Our online demonstration is no longer available, but the models remain available +to download [here](https://huggingface.co/OpenAssistant). @@ -44,37 +43,18 @@ help us improve the model in future! -The candidate Pythia SFT models are +All of our models are [available on HuggingFace](https://huggingface.co/OpenAssistant) and can be -loaded via the HuggingFace Transformers library. As such you may be able to use -them with sufficient hardware. There are also spaces on HF which can be used to -chat with the OA candidate without your own hardware. However, these models are -not final and can produce poor or undesirable outputs. +loaded via the HuggingFace Transformers library or other runners if converted. +As such you may be able to use them with sufficient hardware. There are also +spaces on HF which can be used to chat with the OA candidate without your own +hardware. However, some of these models are not final and can produce poor or +undesirable outputs. -LLaMa SFT models cannot be released directly due to Meta's license but XOR +LLaMa (v1) SFT models cannot be released directly due to Meta's license but XOR weights are released on the HuggingFace org. Follow the process in the README -there to obtain a full model from these XOR weights. - - -
- - -### Is there an API available? - - - -There is no API currently available for Open Assistant. Any mention of an API in -documentation is referencing the website's internal API. We understand that an -API is a highly requested feature, but unfortunately, we can't provide one at -this time due to a couple of reasons. Firstly, the inference system is already -under high load and running off of compute from our sponsors. Secondly, the -project's primary goal is currently data collection and model training, not -providing a product. - -However, if you're looking to run inference, you can host the model yourself -either on your own hardware or with a cloud provider. We appreciate your -understanding and patience as we continue to develop this project. +there to obtain a full model from these XOR weights. Llama 2 models are not +required to be XORed.
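The XOR release scheme referenced in the rewritten answer above works because XOR is its own inverse: what gets published is effectively base XOR finetuned, so anyone who already holds the original base weights can recover the finetuned weights by applying XOR a second time. The TypeScript sketch below is a byte-level toy that only illustrates that property; it is not the conversion tooling the README points to, and the small buffers merely stand in for checkpoint files of matching layout.

```ts
// Toy illustration of XOR-released weights. XOR is self-inverse, so
// xor(base, xor(base, finetuned)) equals finetuned. The real process operates
// on model checkpoint files; this only demonstrates the underlying property.
function xorBytes(a: Uint8Array, b: Uint8Array): Uint8Array {
  if (a.length !== b.length) {
    throw new Error("buffers must be the same length");
  }
  const out = new Uint8Array(a.length);
  for (let i = 0; i < a.length; i++) {
    out[i] = a[i] ^ b[i];
  }
  return out;
}

const base = new Uint8Array([0x10, 0x20, 0x30, 0x40]);      // stand-in for the base LLaMa weights
const finetuned = new Uint8Array([0x15, 0x2f, 0x3c, 0x4a]); // stand-in for the OA SFT weights

const released = xorBytes(base, finetuned);  // what can legally be published
const recovered = xorBytes(base, released);  // what a user with the base weights reconstructs

console.log(recovered); // Uint8Array(4) [ 21, 47, 60, 74 ], identical to finetuned
```

Because Llama 2, Falcon, Pythia, and StableLM checkpoints can be redistributed directly, this indirection was only ever needed for the original LLaMa (v1) releases, which matches the "Llama 2 models are not required to be XORed" note above.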
@@ -102,15 +82,13 @@ inference setup and UI locally unless you wish to assist in development. All Open Assistant code is licensed under Apache 2.0. This means it is available for a wide range of uses including commercial use. -The Open Assistant Pythia based models are released as full weights and will be -licensed under the Apache 2.0 license. - -The Open Assistant LLaMa based models will be released only as delta weights -meaning you will need the original LLaMa weights to use them, and the license -restrictions will therefore be those placed on the LLaMa weights. +Open Assistant models are released under the license of their respective base +models, be that Llama 2, Falcon, Pythia, or StableLM. LLaMa (not 2) models are +only released as XOR weights, meaning you will need the original LLaMa weights +to use them. -The Open Assistant data is released under a Creative Commons license allowing a -wide range of uses including commercial use. +The Open Assistant data is released under Apache-2.0 allowing a wide range of +uses including commercial use. @@ -138,9 +116,8 @@ you to everyone who has taken part! -The model code, weights, and data are free. We are additionally hosting a free -public instance of our best current model for as long as we can thanks to -compute donation from Stability AI via LAION! +The model code, weights, and data are free. Our free public instance of our best +models is no longer available due to the project's conclusion. @@ -151,10 +128,9 @@ compute donation from Stability AI via LAION! -The current smallest (Pythia) model is 12B parameters and is challenging to run -on consumer hardware, but can run on a single professional GPU. In future there -may be smaller models and we hope to make progress on methods like integer -quantisation which can help run the model on smaller hardware. +The current smallest models are 7B parameters and are challenging to run on +consumer hardware, but can run on a single professional GPU or be quantized to +run on more widely available hardware. @@ -165,13 +141,7 @@ quantisation which can help run the model on smaller hardware. -If you want to help in the data collection for training the model, go to the -website [https://open-assistant.io/](https://open-assistant.io/). - -If you want to contribute code, take a look at the -[tasks in GitHub](https://github.com/orgs/LAION-AI/projects/3) and comment on an -issue stating your wish to be assigned. You can also take a look at this -[contributing guide](https://github.com/LAION-AI/Open-Assistant/blob/main/CONTRIBUTING.md). +This project has now concluded. @@ -190,104 +160,6 @@ well as accelerate, DeepSpeed, bitsandbytes, NLTK, and other libraries. -## Questions about the data collection website -
- - -### Can I use ChatGPT to help in training Open Assistant, for instance, by generating answers? - - - -No, it is against their terms of service to use it to help train other models. -See -[this issue](https://github.com/LAION-AI/Open-Assistant/issues/471#issuecomment-1374392299). -ChatGPT-like answers will be removed. - -
- -
- - -### What should I do if I don't know how to complete the task as an assistant? - - -Skip it. -
- -
- - -### Should I fact check the answers by the assistant? - - - -Yes, you should try. If you are not sure, skip the task. - -
- -
- - -### How can I see my score? - - - -In your [account settings](https://open-assistant.io/account). - -
- -
- - -### Can we see how many data points have been collected? - - - -You can see a regularly updated interface at -[https://open-assistant.io/stats](https://open-assistant.io/stats). - -
- -
- - -### How do I write and label prompts? - - - -Check the -[guidelines](https://projects.laion.ai/Open-Assistant/docs/guides/guidelines). - -
- -
- - -### Where can I report a bug or create a new feature request? - - - -In the [GitHub issues](https://github.com/LAION-AI/Open-Assistant/issues). - -
- -
- - -### Why am I not allowed to write about this topic, even though it isn't illegal? - - - -We want to ensure that the Open Assistant dataset is as accessible as possible. -As such, it's necessary to avoid any harmful or offensive content that could be -grounds for removal on sites such as Hugging Face. Likewise, we want the model -to be trained to reject as few questions as possible, so it's important to not -include prompts that leave the assistant with no other choice but to refuse in -order to avoid the generation of harmful content. - -
- ## Questions about the development process
diff --git a/docs/docs/intro.md b/docs/docs/intro.md index 326502bfe3..98f50762ca 100644 --- a/docs/docs/intro.md +++ b/docs/docs/intro.md @@ -1,3 +1,9 @@ +# Notice + +**Open Assistant has now concluded.** Please see +[this video](https://www.youtube.com/watch?v=gqtmUHhaplo) for more information. +Thanks you to all those who made this project possible. + # Introduction > The FAQ page is available at From 1f621d361a6b1c7cca9f3927c8d196fed138a29c Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 13:59:28 +0100 Subject: [PATCH 04/11] added bye page --- website/next.config.js | 19 ++++++++++++++ website/src/pages/bye.tsx | 55 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+) create mode 100644 website/src/pages/bye.tsx diff --git a/website/next.config.js b/website/next.config.js index 911de0c25c..ffb2762725 100644 --- a/website/next.config.js +++ b/website/next.config.js @@ -31,6 +31,25 @@ const nextConfig = { ignoreDuringBuilds: true, }, async redirects() { + if (process.env.BYE === "true") { + return [ + { + source: "/", + destination: "/bye", + permanent: false, + }, + { + source: "/chat", + destination: "/bye", + permanent: false, + }, + { + source: "/contributors", + destination: "https://ykilcher.com/oa-contributors", + permanent: false, + }, + ]; + } if (process.env.MAINTENANCE_MODE !== "true") { return []; } diff --git a/website/src/pages/bye.tsx b/website/src/pages/bye.tsx new file mode 100644 index 0000000000..58a495c4a9 --- /dev/null +++ b/website/src/pages/bye.tsx @@ -0,0 +1,55 @@ +import Image from "next/image"; +import { Container } from "src/components/Container"; +export { getStaticProps } from "src/lib/defaultServerSideProps"; + +const ByePage = () => { + return ( +
+ +
+
+ temp-image +
+
+
+

OpenAssistant has finished!

+

+ OpenAssistant collected data from over 13'000 humans and released it to the public. + Data, models, and code are publicly available. +

+

Links:

+ +

If you're looking to support other open-data projects, check out these:

+ +
+
+
+
+
+ ); +}; + +export default ByePage; From fcd2453dcdcf3920dcf33698c7b15f0752553807 Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 14:20:31 +0100 Subject: [PATCH 05/11] pre-commit --- website/src/pages/bye.tsx | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/website/src/pages/bye.tsx b/website/src/pages/bye.tsx index 58a495c4a9..02ea848d50 100644 --- a/website/src/pages/bye.tsx +++ b/website/src/pages/bye.tsx @@ -14,8 +14,8 @@ const ByePage = () => {

OpenAssistant has finished!

- OpenAssistant collected data from over 13'000 humans and released it to the public. - Data, models, and code are publicly available. + OpenAssistant collected data from over 13'000 humans and released it to the public. Data, models, + and code are publicly available.

Links:

-

If you're looking to support other open-data projects, check out these:

+

+ If you're looking to support other open-data projects, check out these: +

  • LMSYS Chatbot Arena From 5c0efa6e564d519f448c323d643fe0eb8bb6e37b Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 15:31:29 +0100 Subject: [PATCH 06/11] added dashboard redirect --- website/next.config.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/website/next.config.js b/website/next.config.js index ffb2762725..0fee93b73f 100644 --- a/website/next.config.js +++ b/website/next.config.js @@ -38,6 +38,11 @@ const nextConfig = { destination: "/bye", permanent: false, }, + { + source: "/dashboard", + destination: "/bye", + permanent: false, + }, { source: "/chat", destination: "/bye", From 46520c368858feb9a3ae07ebeb47d7bfa729da7b Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 15:34:09 +0100 Subject: [PATCH 07/11] deployment workflows for bye --- .github/workflows/deploy-to-node.yaml | 1 + ansible/deploy-to-node.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/deploy-to-node.yaml b/.github/workflows/deploy-to-node.yaml index a34e550334..f04484c6ba 100644 --- a/.github/workflows/deploy-to-node.yaml +++ b/.github/workflows/deploy-to-node.yaml @@ -81,6 +81,7 @@ jobs: BACKEND_CORS_ORIGINS: ${{ vars.BACKEND_CORS_ORIGINS }} WEB_INFERENCE_SERVER_HOST: ${{ vars.WEB_INFERENCE_SERVER_HOST }} WEB_ENABLE_CHAT: ${{ vars.WEB_ENABLE_CHAT }} + WEB_BYE: ${{ vars.WEB_BYE }} WEB_ENABLE_DRAFTS_WITH_PLUGINS: ${{ vars.WEB_ENABLE_DRAFTS_WITH_PLUGINS }} WEB_NUM_GENERATED_DRAFTS: ${{ vars.WEB_NUM_GENERATED_DRAFTS }} WEB_CURRENT_ANNOUNCEMENT: ${{ vars.WEB_CURRENT_ANNOUNCEMENT }} diff --git a/ansible/deploy-to-node.yaml b/ansible/deploy-to-node.yaml index f36e020710..5fe6114c8b 100644 --- a/ansible/deploy-to-node.yaml +++ b/ansible/deploy-to-node.yaml @@ -284,6 +284,7 @@ INFERENCE_SERVER_API_KEY: "{{ lookup('ansible.builtin.env', 'WEB_INFERENCE_SERVER_API_KEY') }}" ENABLE_CHAT: "{{ lookup('ansible.builtin.env', 'WEB_ENABLE_CHAT') }}" + BYE: "{{ lookup('ansible.builtin.env', 'BYE') }}" ENABLE_DRAFTS_WITH_PLUGINS: "{{ lookup('ansible.builtin.env', 'WEB_ENABLE_DRAFTS_WITH_PLUGINS')}}" From 29c50eee601b0a93dca61f44f0869945d29d0fc7 Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 16:10:19 +0100 Subject: [PATCH 08/11] ansible fix --- ansible/deploy-to-node.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ansible/deploy-to-node.yaml b/ansible/deploy-to-node.yaml index 5fe6114c8b..33898bc19c 100644 --- a/ansible/deploy-to-node.yaml +++ b/ansible/deploy-to-node.yaml @@ -284,7 +284,7 @@ INFERENCE_SERVER_API_KEY: "{{ lookup('ansible.builtin.env', 'WEB_INFERENCE_SERVER_API_KEY') }}" ENABLE_CHAT: "{{ lookup('ansible.builtin.env', 'WEB_ENABLE_CHAT') }}" - BYE: "{{ lookup('ansible.builtin.env', 'BYE') }}" + BYE: "{{ lookup('ansible.builtin.env', 'WEB_BYE') }}" ENABLE_DRAFTS_WITH_PLUGINS: "{{ lookup('ansible.builtin.env', 'WEB_ENABLE_DRAFTS_WITH_PLUGINS')}}" From ca7dc792b238065fafd72886d6fd2c76f58a39bd Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 16:46:25 +0100 Subject: [PATCH 09/11] apparently, prod ignores redirects or process env --- website/next.config.js | 24 ------------------------ website/src/hooks/env/BrowserEnv.ts | 1 + website/src/pages/api/config.ts | 1 + website/src/pages/bye.tsx | 3 ++- website/src/pages/chat/[id].tsx | 10 +++++++++- website/src/pages/contributors.tsx | 9 +++++++++ website/src/pages/dashboard.tsx | 9 ++++++++- website/src/pages/index.tsx | 7 +++++++ website/src/types/Config.ts | 1 + website/types/env.d.ts | 1 + 10 files changed, 39 insertions(+), 27 
deletions(-) create mode 100644 website/src/pages/contributors.tsx diff --git a/website/next.config.js b/website/next.config.js index 0fee93b73f..911de0c25c 100644 --- a/website/next.config.js +++ b/website/next.config.js @@ -31,30 +31,6 @@ const nextConfig = { ignoreDuringBuilds: true, }, async redirects() { - if (process.env.BYE === "true") { - return [ - { - source: "/", - destination: "/bye", - permanent: false, - }, - { - source: "/dashboard", - destination: "/bye", - permanent: false, - }, - { - source: "/chat", - destination: "/bye", - permanent: false, - }, - { - source: "/contributors", - destination: "https://ykilcher.com/oa-contributors", - permanent: false, - }, - ]; - } if (process.env.MAINTENANCE_MODE !== "true") { return []; } diff --git a/website/src/hooks/env/BrowserEnv.ts b/website/src/hooks/env/BrowserEnv.ts index 4d863c0d9a..8ed2f357f9 100644 --- a/website/src/hooks/env/BrowserEnv.ts +++ b/website/src/hooks/env/BrowserEnv.ts @@ -1,6 +1,7 @@ import { createContext, useContext } from "react"; export interface BrowserConfig { + BYE: boolean; ENABLE_CHAT: boolean; ENABLE_DRAFTS_WITH_PLUGINS: boolean; NUM_GENERATED_DRAFTS: number; diff --git a/website/src/pages/api/config.ts b/website/src/pages/api/config.ts index 36c4a6b53d..33c9d8c70a 100644 --- a/website/src/pages/api/config.ts +++ b/website/src/pages/api/config.ts @@ -4,6 +4,7 @@ import { BrowserConfig } from "src/types/Config"; // don't put sensitive information here const config: BrowserConfig = { + BYE: boolean(process.env.BYE), ENABLE_CHAT: boolean(process.env.ENABLE_CHAT), ENABLE_DRAFTS_WITH_PLUGINS: boolean(process.env.ENABLE_DRAFTS_WITH_PLUGINS), NUM_GENERATED_DRAFTS: Number(process.env.NUM_GENERATED_DRAFTS), diff --git a/website/src/pages/bye.tsx b/website/src/pages/bye.tsx index 02ea848d50..0ad9018b04 100644 --- a/website/src/pages/bye.tsx +++ b/website/src/pages/bye.tsx @@ -1,4 +1,5 @@ import Image from "next/image"; +import Link from "next/link"; import { Container } from "src/components/Container"; export { getStaticProps } from "src/lib/defaultServerSideProps"; @@ -20,7 +21,7 @@ const ByePage = () => {

    Links:

    • - List of contributors + List of contributors
    • Paper diff --git a/website/src/pages/chat/[id].tsx b/website/src/pages/chat/[id].tsx index b8bb0b81db..8e3cf55f75 100644 --- a/website/src/pages/chat/[id].tsx +++ b/website/src/pages/chat/[id].tsx @@ -8,9 +8,12 @@ import { get } from "src/lib/api"; import { ModelInfo, PluginEntry } from "src/types/Chat"; export { getServerSideProps } from "src/lib/defaultServerSideProps"; import useSWRImmutable from "swr/immutable"; +import { useBrowserConfig } from "src/hooks/env/BrowserEnv"; const Chat = () => { - const { query } = useRouter(); + const { BYE } = useBrowserConfig(); + const router = useRouter(); + const { query } = router; const id = query.id as string; const { t } = useTranslation(["common", "chat"]); const { data: modelInfos } = useSWRImmutable("/api/chat/models", get, { @@ -20,6 +23,11 @@ const Chat = () => { keepPreviousData: true, }); + if (BYE) { + router.push("/bye"); + return null; + } + return ( <> diff --git a/website/src/pages/contributors.tsx b/website/src/pages/contributors.tsx new file mode 100644 index 0000000000..98610c4fb4 --- /dev/null +++ b/website/src/pages/contributors.tsx @@ -0,0 +1,9 @@ +import { useRouter } from "next/router"; + +const ContributorsPage = () => { + const router = useRouter(); + router.push("https://ykilcher.com/oa-contributors"); + return null; +}; + +export default ContributorsPage; diff --git a/website/src/pages/dashboard.tsx b/website/src/pages/dashboard.tsx index 62d2bbdf99..2f6d9c0e84 100644 --- a/website/src/pages/dashboard.tsx +++ b/website/src/pages/dashboard.tsx @@ -14,10 +14,12 @@ import { useBrowserConfig } from "src/hooks/env/BrowserEnv"; import { useCurrentLocale } from "src/hooks/locale/useCurrentLocale"; import { API_ROUTES } from "src/lib/routes"; import useSWR from "swr"; +import { useRouter } from "next/router"; const Dashboard = () => { const { t } = useTranslation(["dashboard", "common", "tasks"]); - const { ENABLE_CHAT } = useBrowserConfig(); + const { ENABLE_CHAT, BYE } = useBrowserConfig(); + const router = useRouter(); const lang = useCurrentLocale(); const { data } = useSWR(API_ROUTES.AVAILABLE_TASK({ lang }), get, { refreshInterval: 2 * 60 * 1000, //2 minutes @@ -55,6 +57,11 @@ const Dashboard = () => { }, }; + if (BYE) { + router.push("/bye"); + return null; + } + return ( <> diff --git a/website/src/pages/index.tsx b/website/src/pages/index.tsx index 3c33b94367..453d6b7cf7 100644 --- a/website/src/pages/index.tsx +++ b/website/src/pages/index.tsx @@ -8,8 +8,10 @@ import { CallToAction } from "src/components/CallToAction"; import { Faq } from "src/components/Faq"; import { Hero } from "src/components/Hero"; export { getDefaultServerSideProps as getStaticProps } from "src/lib/defaultServerSideProps"; +import { useBrowserConfig } from "src/hooks/env/BrowserEnv"; const Home = () => { + const { BYE } = useBrowserConfig(); const router = useRouter(); const { status } = useSession(); const { t } = useTranslation(); @@ -19,6 +21,11 @@ const Home = () => { } }, [router, status]); + if (BYE) { + router.push("/bye"); + return null; + } + return ( <> diff --git a/website/src/types/Config.ts b/website/src/types/Config.ts index 087cc55faa..37de5d9070 100644 --- a/website/src/types/Config.ts +++ b/website/src/types/Config.ts @@ -1,4 +1,5 @@ export interface BrowserConfig { + BYE: boolean; ENABLE_CHAT: boolean; ENABLE_DRAFTS_WITH_PLUGINS: boolean; // Whether draft messages should be generated if plugins are in use NUM_GENERATED_DRAFTS: number; diff --git a/website/types/env.d.ts b/website/types/env.d.ts index 58667dd643..c338c506d7 
100644 --- a/website/types/env.d.ts +++ b/website/types/env.d.ts @@ -9,6 +9,7 @@ declare global { ADMIN_USERS: string; MODERATOR_USERS: string; INFERENCE_SERVER_HOST: string; + BYE: boolean; ENABLE_CHAT: boolean; ENABLE_DRAFTS_WITH_PLUGINS: boolean; NUM_GENERATED_DRAFTS: number; From e1769c102f1597cc0b53a8b915f858239d197aeb Mon Sep 17 00:00:00 2001 From: Yannic Kilcher Date: Sat, 25 Nov 2023 16:58:48 +0100 Subject: [PATCH 10/11] next build fixes --- website/src/pages/chat/[id].tsx | 7 ------- website/src/pages/chat/index.tsx | 10 ++++++++++ website/src/pages/contributors.tsx | 6 +++++- website/src/pages/dashboard.tsx | 13 +++++++------ website/src/pages/index.tsx | 11 +++++------ 5 files changed, 27 insertions(+), 20 deletions(-) diff --git a/website/src/pages/chat/[id].tsx b/website/src/pages/chat/[id].tsx index 8e3cf55f75..c55645afe1 100644 --- a/website/src/pages/chat/[id].tsx +++ b/website/src/pages/chat/[id].tsx @@ -8,10 +8,8 @@ import { get } from "src/lib/api"; import { ModelInfo, PluginEntry } from "src/types/Chat"; export { getServerSideProps } from "src/lib/defaultServerSideProps"; import useSWRImmutable from "swr/immutable"; -import { useBrowserConfig } from "src/hooks/env/BrowserEnv"; const Chat = () => { - const { BYE } = useBrowserConfig(); const router = useRouter(); const { query } = router; const id = query.id as string; @@ -23,11 +21,6 @@ const Chat = () => { keepPreviousData: true, }); - if (BYE) { - router.push("/bye"); - return null; - } - return ( <> diff --git a/website/src/pages/chat/index.tsx b/website/src/pages/chat/index.tsx index 3e2856b459..68fcdeef90 100644 --- a/website/src/pages/chat/index.tsx +++ b/website/src/pages/chat/index.tsx @@ -1,12 +1,22 @@ import Head from "next/head"; +import { useRouter } from "next/router"; import { useTranslation } from "next-i18next"; import React from "react"; import { ChatListBase } from "src/components/Chat/ChatListBase"; import { DashboardLayout } from "src/components/Layout"; export { getStaticProps } from "src/lib/defaultServerSideProps"; +import { useBrowserConfig } from "src/hooks/env/BrowserEnv"; const ChatList = () => { const { t } = useTranslation(); + const { BYE } = useBrowserConfig(); + const router = useRouter(); + + React.useEffect(() => { + if (BYE) { + router.push("/bye"); + } + }, [router, BYE]); return ( <> diff --git a/website/src/pages/contributors.tsx b/website/src/pages/contributors.tsx index 98610c4fb4..8f38f71269 100644 --- a/website/src/pages/contributors.tsx +++ b/website/src/pages/contributors.tsx @@ -1,8 +1,12 @@ import { useRouter } from "next/router"; +import { useEffect } from "react"; const ContributorsPage = () => { const router = useRouter(); - router.push("https://ykilcher.com/oa-contributors"); + useEffect(() => { + router.push("https://ykilcher.com/oa-contributors"); + }, [router]); + return null; }; diff --git a/website/src/pages/dashboard.tsx b/website/src/pages/dashboard.tsx index 2f6d9c0e84..17282b61d2 100644 --- a/website/src/pages/dashboard.tsx +++ b/website/src/pages/dashboard.tsx @@ -1,7 +1,8 @@ import { Button, Card, CardBody, Flex, Heading } from "@chakra-ui/react"; import Head from "next/head"; +import { useRouter } from "next/router"; import { useTranslation } from "next-i18next"; -import { useMemo } from "react"; +import { useEffect, useMemo } from "react"; import { LeaderboardWidget, TaskOption, WelcomeCard } from "src/components/Dashboard"; import { DashboardLayout } from "src/components/Layout"; import { get } from "src/lib/api"; @@ -14,7 +15,6 @@ import { useBrowserConfig 
} from "src/hooks/env/BrowserEnv"; import { useCurrentLocale } from "src/hooks/locale/useCurrentLocale"; import { API_ROUTES } from "src/lib/routes"; import useSWR from "swr"; -import { useRouter } from "next/router"; const Dashboard = () => { const { t } = useTranslation(["dashboard", "common", "tasks"]); @@ -57,10 +57,11 @@ const Dashboard = () => { }, }; - if (BYE) { - router.push("/bye"); - return null; - } + useEffect(() => { + if (BYE) { + router.push("/bye"); + } + }, [BYE, router]); return ( <> diff --git a/website/src/pages/index.tsx b/website/src/pages/index.tsx index 453d6b7cf7..60e04cd284 100644 --- a/website/src/pages/index.tsx +++ b/website/src/pages/index.tsx @@ -16,15 +16,14 @@ const Home = () => { const { status } = useSession(); const { t } = useTranslation(); useEffect(() => { + if (BYE) { + router.push("/bye"); + } + if (status === "authenticated") { router.push("/dashboard"); } - }, [router, status]); - - if (BYE) { - router.push("/bye"); - return null; - } + }, [router, status, BYE]); return ( <> From f1e6ed9526f5817531f3ab85441a40b3671ddccb Mon Sep 17 00:00:00 2001 From: Andrew Maguire Date: Sat, 6 Jan 2024 17:26:21 +0000 Subject: [PATCH 11/11] add note about oasst2 being available (#3743) --- README.md | 2 +- docs/blog/2023-10-25-open-assistant-is-completed/index.mdx | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 054fb65ef2..f8ed80496f 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@
      -

      :memo: NOTE: OpenAssistant is completed, and the project is now finished. Thank you to everyone who contributed! Check out our blog post for more information.

      +

      :memo: NOTE: OpenAssistant is completed, and the project is now finished. Thank you to everyone who contributed! Check out our blog post for more information. The final published oasst2 dataset can be found on HuggingFace at OpenAssistant/oasst2

      diff --git a/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx b/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx index 175c04e784..f650c1b3cf 100644 --- a/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx +++ b/docs/blog/2023-10-25-open-assistant-is-completed/index.mdx @@ -14,4 +14,7 @@ import ReactPlayer from "react-player"; url="https://www.youtube.com/embed/gqtmUHhaplo" /> +The final published oasst2 dataset can be found on HuggingFace at +[OpenAssistant/oasst2](https://huggingface.co/datasets/OpenAssistant/oasst2). +
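Since the README and the closing blog post now point readers at the oasst2 dataset, a short sketch of querying that release programmatically may be useful. It uses the Hugging Face Hub's public REST endpoint for dataset repositories; the fields listed in the interface are assumptions about a small subset of what that API returns, not anything defined in this repository.

```ts
// Fetch basic metadata for the final oasst2 release from the Hugging Face Hub.
// Runnable with Node 18+ (built-in fetch); no token is needed for public datasets.
interface DatasetInfo {
  id: string;                         // "OpenAssistant/oasst2"
  lastModified?: string;
  downloads?: number;
  siblings?: { rfilename: string }[]; // files contained in the dataset repo
}

async function getOasst2Info(): Promise<DatasetInfo> {
  const res = await fetch("https://huggingface.co/api/datasets/OpenAssistant/oasst2");
  if (!res.ok) {
    throw new Error(`Hub request failed with status ${res.status}`);
  }
  return (await res.json()) as DatasetInfo;
}

getOasst2Info().then((info) => {
  console.log(info.id, info.lastModified);
  for (const file of info.siblings ?? []) {
    console.log(" -", file.rfilename);
  }
});
```

The same repository id can be passed to any other Hub client if you prefer to download the actual data files rather than just inspect the metadata.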