nico-martin HF Staff committed on
Commit
b77708b
·
1 Parent(s): db78b1a

added tfjs v4

Browse files
.idea/vcs.xml CHANGED
@@ -3,5 +3,6 @@
3
  <component name="VcsDirectoryMappings">
4
  <mapping directory="" vcs="Git" />
5
  <mapping directory="$PROJECT_DIR$" vcs="Git" />
 
6
  </component>
7
  </project>
 
3
  <component name="VcsDirectoryMappings">
4
  <mapping directory="" vcs="Git" />
5
  <mapping directory="$PROJECT_DIR$" vcs="Git" />
6
+ <mapping directory="$PROJECT_DIR$/transformers.js-v4/transformers.js" vcs="Git" />
7
  </component>
8
  </project>
package-lock.json CHANGED
The diff for this file is too large to render. See raw diff
 
package.json CHANGED
@@ -10,7 +10,7 @@
10
  "preview": "vite preview"
11
  },
12
  "dependencies": {
13
- "@huggingface/transformers": "../transformers.js",
14
  "@tailwindcss/typography": "^0.5.19",
15
  "lucide-react": "^0.554.0",
16
  "react": "^19.1.1",
 
10
  "preview": "vite preview"
11
  },
12
  "dependencies": {
13
+ "@huggingface/transformers": "./transformers.js-v4/transformers.js",
14
  "@tailwindcss/typography": "^0.5.19",
15
  "lucide-react": "^0.554.0",
16
  "react": "^19.1.1",
src/chat/Chat.tsx CHANGED
@@ -72,31 +72,6 @@ export default function Chat({ className = "" }: { className?: string }) {
72
  setState(State.GENERATING);
73
  await generator.runAgent(prompt);
74
 
75
- /*let nextPrompt = prompt;
76
- while (nextPrompt) {
77
- const conversation = [...messages, { role: "user", content: prompt }];
78
- setMessages(conversation);
79
-
80
- const answer: Message = {
81
- role: "assistant",
82
- content: "",
83
- };
84
-
85
- const { response, modelUsage } = await generator.generateText(
86
- settings.modelKey,
87
- conversation,
88
- [],
89
- settings.temperature,
90
- settings.enableThinking,
91
- (chunk) => {
92
- answer.content += chunk;
93
- setMessages([...conversation, answer]);
94
- }
95
- );
96
- console.log(response, modelUsage);
97
- setMessages([...conversation, { role: "assistant", content: response }]);
98
- }*/
99
-
100
  setState(State.READY);
101
  };
102
 
 
72
  setState(State.GENERATING);
73
  await generator.runAgent(prompt);
74
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  setState(State.READY);
76
  };
77
 
src/utils/context/chatSettings/ChatSettingsContext.ts CHANGED
@@ -17,5 +17,5 @@ export const DEFAULT_CHAT_SETTINGS: ChatSettings = {
17
  modelKey: Object.keys(MODELS)[0],
18
  systemPrompt: DEFAULT_SYSTEM_PROMPT,
19
  temperature: 0.7,
20
- enableThinking: true,
21
  };
 
17
  modelKey: Object.keys(MODELS)[0],
18
  systemPrompt: DEFAULT_SYSTEM_PROMPT,
19
  temperature: 0.7,
20
+ enableThinking: false,
21
  };
src/utils/context/chatSettings/ChatSettingsContextProvider.tsx CHANGED
@@ -66,7 +66,7 @@ export default function ChatSettingsContextProvider({
66
  return (
67
  <ChatSettingsContext
68
  value={{
69
- settings,
70
  setSettings,
71
  downloadedModels,
72
  openSettingsModal: () => setModalOpen(true),
 
66
  return (
67
  <ChatSettingsContext
68
  value={{
69
+ settings: hasAllSettings(settings) ? settings : null,
70
  setSettings,
71
  downloadedModels,
72
  openSettingsModal: () => setModalOpen(true),
src/utils/context/chatSettings/ChatSettingsModal.tsx CHANGED
@@ -40,8 +40,9 @@ export default function ChatSettingsModal({
40
  temperature:
41
  settings.temperature || DEFAULT_CHAT_SETTINGS.temperature,
42
  enableThinking:
43
- settings.enableThinking ||
44
- DEFAULT_CHAT_SETTINGS.enableThinking,
 
45
  }
46
  : DEFAULT_CHAT_SETTINGS
47
  }
 
40
  temperature:
41
  settings.temperature || DEFAULT_CHAT_SETTINGS.temperature,
42
  enableThinking:
43
+ settings.enableThinking === null
44
+ ? DEFAULT_CHAT_SETTINGS.enableThinking
45
+ : settings.enableThinking,
46
  }
47
  : DEFAULT_CHAT_SETTINGS
48
  }
transformers.js-v4/transformers.js ADDED
@@ -0,0 +1 @@
 
 
1
+ Subproject commit aaa8a4daba8ade4068f86479462b62716c6c7556